Migrated my site to hugo

Kaan Barmore-Genç 2022-04-10 22:44:06 -04:00
commit 5e0e484008
105 changed files with 17796 additions and 0 deletions

6
.gitattributes vendored Normal file

@@ -0,0 +1,6 @@
*.png filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.svg filter=lfs diff=lfs merge=lfs -text
*.gif filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.mp3 filter=lfs diff=lfs merge=lfs -text

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
public
.hugo_build.lock

6
.gitmodules vendored Normal file

@@ -0,0 +1,6 @@
[submodule "themes/hello-friend-ng"]
path = themes/hello-friend-ng
url = https://github.com/rhazdon/hugo-theme-hello-friend-ng.git
[submodule "themes/catafalque"]
path = themes/catafalque
url = git@github.com:SeriousBug/hugo-theme-catafalque.git

5
archetypes/default.md Normal file

@@ -0,0 +1,5 @@
---
title: "{{ replace .Name "-" " " | title }}"
date: {{ .Date }}
draft: true
---

208
config.toml Normal file

@@ -0,0 +1,208 @@
baseURL = "/"
title = "Kaan Barmore-Genç"
languageCode = "en-us"
theme = "catafalque"
PygmentsCodeFences = true
PygmentsStyle = "monokai"
paginate = 20
rssLimit = 60 # Maximum number of items in the RSS feed.
copyright = "Contents are licensed under CC 4.0 unless specified otherwise." # This message is only used by the RSS template.
# googleAnalytics = ""
# disqusShortname = ""
archetypeDir = "archetypes"
contentDir = "content"
dataDir = "data"
layoutDir = "layouts"
publishDir = "public"
buildDrafts = false
buildFuture = false
buildExpired = false
canonifyURLs = true
enableRobotsTXT = true
enableGitInfo = false
enableEmoji = true
enableMissingTranslationPlaceholders = false
disableRSS = false
disableSitemap = false
disable404 = false
disableHugoGeneratorInject = false
[permalinks]
posts = "/:filename/"
[author]
name = "Kaan Barmore-Genç"
[blackfriday]
hrefTargetBlank = true
#[taxonomies]
# tag = "tags"
# category = "categories"
# series = "series"
[params]
dateform = "Jan 2, 2006"
dateformShort = "Jan 2"
dateformNum = "2006-01-02"
dateformNumTime = "2006-01-02 15:04"
# Metadata mostly used in document's head
#
description = "Website of Kaan Barmore-Genç, and his personal blog"
keywords = ""
images = [""]
# Home subtitle of the index page.
#
homeSubtitle = [
"Hi! I'm a Software Engineer at Dendron, and a recent Master's graduate from the Ohio State University. I'm an avid Linux user, an enthusiast of many programming languages, a home cook, and an amateur gardener.",
"My interests include building web and mobile applications, both front and back end. Over the years I learned and used many programming languages and technologies, including JavaScript, TypeScript, React, React Native, Python, Java, C, C++, Clojure, Rust, and Haskell. Pretty much everthing I've worked on is open source and available on my Github page.",
]
# Set a background for the homepage
# backgroundImage = "assets/images/background.jpg"
# Prefix of link to the git commit detail page. GitInfo must be enabled.
#
# gitUrl = ""
# Set disableReadOtherPosts to true in order to hide the links to other posts.
#
disableReadOtherPosts = false
# Enable theme toggle
#
# This option enables the theme toggle for the theme.
# By default, this option is off.
# The theme respects the prefers-color-scheme setting of the operating system.
# With this option on, the user can pick the color scheme they want.
enableThemeToggle = true
# Sharing buttons
#
# There are a lot of buttons preconfigured. If you want to change them,
# generate the buttons here: https://sharingbuttons.io
# and add them into your own `layouts/partials/sharing-buttons.html`
#
enableSharingButtons = false
# Global language menu
#
# Enables the global language menu.
#
enableGlobalLanguageMenu = false
# Integrate Javascript files or stylesheets by adding the url to the external assets or by
# linking local files with their path relative to the static folder, e.g. "css/styles.css"
#
customCSS = []
customJS = []
# Toggling this option requires rebuilding the SCSS, which needs the extended version of Hugo
#
justifyContent = false # Set "text-align: justify" to .post-content.
# Custom footer
# If you want, you can easily override the default footer with your own content.
#
[params.footer]
trademark = false
rss = true
copyright = true
author = false
topText = []
# bottomText = [
# "Powered by <a href=\"http://gohugo.io\">Hugo</a>",
# "Made with &#10084; by <a href=\"https://github.com/rhazdon\">Djordje Atlialp</a>",
# ]
# Colors for favicons
#
[params.favicon.color]
mask = "#1b1c1d"
msapplication = "#1b1c1d"
theme = "#1b1c1d"
[params.logo]
logoMark = ">"
logoText = "Kaan Barmore-Genç"
logoHomeLink = "/"
# Set true to remove the logo cursor entirely.
# logoCursorDisabled = false
# Set to a valid CSS color to change the cursor in the logo.
# logoCursorColor = "#67a2c9"
# Set to a valid CSS time value to change the animation duration, "0s" to disable.
# logoCursorAnimate = "2s"
# Commento is more than just a comments widget you can embed —
# its a return to the roots of the internet.
# An internet without the tracking and invasions of privacy.
# An internet that is simple and lightweight.
# An internet that is focused on interesting discussions, not ads.
# A better internet.
# Uncomment this to enable Commento.
#
# [params.commento]
# url = ""
# Uncomment this if you want a portrait on your start page
#
# [params.portrait]
# path = "/img/image.jpg"
# alt = "Portrait"
# maxWidth = "50px"
# Social icons
[[params.social]]
name = "mastodon"
url = "https://mastodon.technology/@kaan"
[[params.social]]
name = "email"
url = "mailto:kaan@bgenc.net"
[[params.social]]
name = "github"
url = "https://github.com/SeriousBug/"
[[params.social]]
name = "linkedin"
url = "https://www.linkedin.com/in/kaan-genc-8489b9205/"
[[params.social]]
name = "cv"
title = "CV"
url = "/extra/cv.pdf"
# [languages]
# [languages.en]
# subtitle = "Hello Friend NG Theme"
# weight = 1
# copyright = '<a href="https://creativecommons.org/licenses/by-nc/4.0/" target="_blank" rel="noopener">CC BY-NC 4.0</a>'
# [languages.fr]
# subtitle = "Hello Friend NG Theme"
# weight = 2
# copyright = '<a href="https://creativecommons.org/licenses/by-nc/4.0/" target="_blank" rel="noopener">CC BY-NC 4.0</a>'
[menu]
#[[menu.main]]
#identifier = "about"
#name = "About"
#url = "about/"
[[menu.main]]
identifier = "posts"
name = "Blog"
url = "posts/"
[gmnhg]
baseUrl = "gemini://gemini.bgenc.net"

Binary file not shown.

Binary file not shown.

BIN
content/extra/cv.pdf Executable file

Binary file not shown.

70
content/extra/emacs.css Normal file

@@ -0,0 +1,70 @@
/* pygments.org "emacs" style */
.highlight .hll { background-color: #ffffcc }
.highlight { background: #f8f8f8; }
.highlight .c { color: #008800; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #FF0000 } /* Error */
.highlight .k { color: #AA22FF; font-weight: bold } /* Keyword */
.highlight .o { color: #666666 } /* Operator */
.highlight .ch { color: #008800; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #008800; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #008800 } /* Comment.Preproc */
.highlight .cpf { color: #008800; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #008800; font-style: italic } /* Comment.Single */
.highlight .cs { color: #008800; font-weight: bold } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .gr { color: #FF0000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #00A000 } /* Generic.Inserted */
.highlight .go { color: #888888 } /* Generic.Output */
.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #0044DD } /* Generic.Traceback */
.highlight .kc { color: #AA22FF; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #AA22FF; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #AA22FF; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #AA22FF } /* Keyword.Pseudo */
.highlight .kr { color: #AA22FF; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #00BB00; font-weight: bold } /* Keyword.Type */
.highlight .m { color: #666666 } /* Literal.Number */
.highlight .s { color: #BB4444 } /* Literal.String */
.highlight .na { color: #BB4444 } /* Name.Attribute */
.highlight .nb { color: #AA22FF } /* Name.Builtin */
.highlight .nc { color: #0000FF } /* Name.Class */
.highlight .no { color: #880000 } /* Name.Constant */
.highlight .nd { color: #AA22FF } /* Name.Decorator */
.highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
.highlight .nf { color: #00A000 } /* Name.Function */
.highlight .nl { color: #A0A000 } /* Name.Label */
.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #B8860B } /* Name.Variable */
.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
.highlight .w { color: #bbbbbb } /* Text.Whitespace */
.highlight .mb { color: #666666 } /* Literal.Number.Bin */
.highlight .mf { color: #666666 } /* Literal.Number.Float */
.highlight .mh { color: #666666 } /* Literal.Number.Hex */
.highlight .mi { color: #666666 } /* Literal.Number.Integer */
.highlight .mo { color: #666666 } /* Literal.Number.Oct */
.highlight .sa { color: #BB4444 } /* Literal.String.Affix */
.highlight .sb { color: #BB4444 } /* Literal.String.Backtick */
.highlight .sc { color: #BB4444 } /* Literal.String.Char */
.highlight .dl { color: #BB4444 } /* Literal.String.Delimiter */
.highlight .sd { color: #BB4444; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #BB4444 } /* Literal.String.Double */
.highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #BB4444 } /* Literal.String.Heredoc */
.highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
.highlight .sx { color: #008000 } /* Literal.String.Other */
.highlight .sr { color: #BB6688 } /* Literal.String.Regex */
.highlight .s1 { color: #BB4444 } /* Literal.String.Single */
.highlight .ss { color: #B8860B } /* Literal.String.Symbol */
.highlight .bp { color: #AA22FF } /* Name.Builtin.Pseudo */
.highlight .fm { color: #00A000 } /* Name.Function.Magic */
.highlight .vc { color: #B8860B } /* Name.Variable.Class */
.highlight .vg { color: #B8860B } /* Name.Variable.Global */
.highlight .vi { color: #B8860B } /* Name.Variable.Instance */
.highlight .vm { color: #B8860B } /* Name.Variable.Magic */
.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */

BIN
content/extra/kaangenc.gpg Normal file

Binary file not shown.

BIN
content/img/2022-03-29-00-16-13.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/2022-03-29-00-17-38.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/2022-03-29-00-20-48.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/2022-03-29-00-22-48.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/ace-jump-mode.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/app-search-bar.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/cc-by-sa-4.0-88x31.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/company-flycheck.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/deus-ex-render-settings.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/deus-ex-renderer-comparison.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/docview.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/elfeed.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/emacs-terminal.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/erc.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/eshell.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/eww.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/game-cover.jpg (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/magit.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/mu4e.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/passmenu.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/password_store.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
content/img/profile.jpg (Stored with Git LFS) Normal file

Binary file not shown.


@@ -0,0 +1,117 @@
---
title: Solving `app_data` or `ReqData` missing in requests for actix-web
date: 2022-03-26
---
> This post is day 5 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I'm using `actix-web` to set up a web server, and I've been hitting a small
problem that I think other people may come across too.
To explain the problem, let me talk a bit about my setup. I have a custom
middleware that checks if a user is authorized to access a route. It looks like
this:
```rust
impl<S: 'static, B> Service<ServiceRequest> for CheckLoginMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
{
    type Response = ServiceResponse<EitherBody<B>>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    dev::forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        let state = self.state.clone();
        let (request, payload) = req.into_parts();
        let service = self.service.clone();

        let user_token = get_token_from_header(&request);
        let path_token = if self.allow_path_tokens {
            get_token_from_query(&request)
        } else {
            None
        };

        Box::pin(async move {
            match verify_auth(state, user_token, path_token, request.path()).await {
                Ok(authorized) => {
                    tracing::debug!("Request authorized, inserting authorization token");
                    // This is the "important bit" where we insert the authorization token into the request data
                    request.extensions_mut().insert(authorized);
                    let service_request =
                        service.call(ServiceRequest::from_parts(request, payload));
                    service_request
                        .await
                        .map(ServiceResponse::map_into_left_body)
                }
                Err(err) => {
                    let response = HttpResponse::Unauthorized().json(err).map_into_right_body();
                    Ok(ServiceResponse::new(request, response))
                }
            }
        })
    }
}
```
The `verify_auth` function is omitted, but the gist of it is that it returns a `Result<Authorized, Error>`.
If the user is authorized, the authorization token `verify_auth` returned is then attached to the request.
Then here's how I use it in a path:
```rust
#[delete("/{store}/{path:.*}")]
async fn delete_storage(
    params: web::Path<(String, String)>,
    // This parameter is automatically filled with the token
    authorized: Option<ReqData<Authorized>>,
) -> Result<HttpResponse, StorageError> {
    let (store, path) = params.as_ref();
    let mut store_path = get_authorized_path(&authorized, store)?;
    store_path.push(path);

    if fs::metadata(&store_path).await?.is_file() {
        tracing::debug!("Deleting file {:?}", store_path);
        fs::remove_file(&store_path).await?;
    } else {
        tracing::debug!("Deleting folder {:?}", store_path);
        fs::remove_dir(&store_path).await?;
    }

    Ok(HttpResponse::Ok().finish())
}
```
This setup worked for this path, but would absolutely not work for another path.
I inserted logs to track everything, and just found that the middleware would
insert the token, but the path would just get `None`. How‽ I tried to slowly
strip everything away from the non-functional path until it was identical to
this one, but it still would not work.
Well it turns out the solution was very simple, see this:
```rust
use my_package::storage::put_storage;
use crate::storage::delete_storage;
```
Ah! They are imported differently. I had set up my project as both a library and
a binary for various reasons. However, it turns out that importing the same thing
through `crate` is different from importing it through the library. Because of that
difference, Actix doesn't recognize that the types match, so the route
can't access the attached token.
The solution is normalizing the imports. I went with importing through the library
for everything, because that's what `rust-analyzer`'s automatic import seems to
prefer.
```rust
use my_package::storage::{put_storage, delete_storage};
```
Solved!

10
content/posts/bash.md Normal file

@@ -0,0 +1,10 @@
---
title: Writing a Program in Bash
date: 2015-04-12
---
I don't really know why, but writing code in Bash makes me kinda anxious. It feels really old, outdated, and confusing. Why can't a function return a string? And no classes, or even data types? After getting confused, usually, I just end up switching to Python.
<!--more-->
But this time, I decided to stick with Bash. And I am surprised. It is unbelievably good. I must say, now I understand the Unix philosophy much better. Having small programs that do one thing very well allows you to combine their power in your scripts. You think your favourite programming language has a lot of libraries? Well, Bash has access to more: the entire Unix ecosystem powers Bash. Converting videos, taking screenshots, sending mails, downloading and processing pages; there are already command line tools for all of that, and you have great access to all of them.
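To give one small, made-up example of that kind of composition (the URL here is just a placeholder), a few standard tools piped together already make a tiny link scraper:
```bash
# download a page, pull out anything that looks like a link, keep the unique ones
curl -s https://example.com/ | grep -oE 'https?://[^"]+' | sort -u > links.txt
```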
The program I've started writing is called [WoWutils](https://github.com/SeriousBug/WoWutils). And I'm still shocked at just how much functionality I have added with so little code. If you are considering writing a program in Bash too, just go through with it. It really is very powerful.


@@ -0,0 +1,35 @@
---
title: "Black Crown Initiate"
date: 2022-04-02
---
> This post is day 9 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I love metal, I've been listening to it since I was 13. It was the first
music genre that I actually liked: until I discovered metal I thought I
didn't like music at all, because nothing I heard on the radio or heard my
friends listening to was interesting to me. My taste in music has expanded and
changed over the years to include different genres, but metal
remains the one I love the most.
Demonstrating my metal-worthiness aside, I've always listened to European metal
bands. I had this weird elitist thought that "good" metal could only come from
Europe, with exceptions for some non-European bands, and that American metal was
just always bad. This is obviously false, but I had just never come across
anything American that I liked. That is, until recently.
I recently came across [Black Crown Initiate](https://www.metal-archives.com/bands/Black_Crown_Initiate/3540386765),
a progressive death metal band from Pennsylvania. And I have to tell you that they are amazing.
Their first release "Song of the Crippled Bull" is absolutely amazing. The music
is just the right amount of metal and progressive, and lyrics are amazing. The
clean vocals get the themes of the song across, while the growls give a lot of
power to the songs. My favorite songs from this release are "Stench of the Iron
Age" and the title track "Song of the Crippled Bull". Other hightlights from the
band I've listened to so far include "A Great Mistake", "Death Comes in
Reverse", "Vicious Lives".
I'm still making my way through their songs, but I'm glad to have discovered
something from America that I absolutely love. I'm now trying to find more
non-European bands that I enjoy.


@@ -0,0 +1,56 @@
---
title: An introduction to Bulgur Cloud - simple self hosted cloud storage
date: 2022-03-29
---
> This post is day 8 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I've been recently working on Bulgur Cloud, a self hosted cloud storage
software. It's essentially Nextcloud, minus all the productivity software. It's
also designed to be much simpler, using no databases and keeping everything on
disk.
The software is still too early to actually demo, but the frontend is at a point
where I can show off some features. So that's what I want to do here.
![A white web page with the words "Bulgur Cloud". Below is "Simple and delicious cloud storage and sharing". Under that are two fields titled "Username" and "Password", and a black button titled "Login".](/img/2022-03-29-00-17-38.png)
I've been going for a clean "print-like" look. I think it's going pretty well so far.
![A web page with 3 files listed, "sprite-fright.mp4", "test.txt", and "sprite-fright.LICENSE.txt". There are pencil and thrash bin symbols to the right of the file names. A leftward arrow is grayed out on the top left, and top right says "kaan". On the bottom right there's a symbol of a cloud with an up arrow.](/img/2022-03-29-00-16-13.png)
I'm not sure about the details of how the directory listing will look. I don't
think I like the upload button in the corner, and the rename and delete icons
feel like they would be easy to mis-press. There is a confirmation before
anything is actually deleted, but it still would be annoying.
![A pop up that says "Delete file "text.txt", with the buttons "Delete" and "Cancel" below it.](/img/2022-03-29-00-20-48.png)
Something I'm pretty happy with is the file previews. I've added support for
images, videos, and PDFs. Video support is restricted by whatever formats are
supported by your browser, the server doesn't do any transcoding, but I think
it's still very useful for a quick preview. I'm also planning on support for
audio files. The server supports range requests, so you can seek around in the
video without waiting to download everything (although I've found that Firefox
doesn't handle that very well).
![A page with the text "sprite-fright.mp4", and a video player below showing a frame from the movie. Below the player is a link that says "Download this file".](/img/2022-03-29-00-22-48.png)
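As an aside, if you're curious whether a server honors range requests, here's a rough way to check with curl (the URL is hypothetical):
```bash
# request only the first kilobyte of the file;
# a 206 (Partial Content) status means the server served just that range
curl -s -o /dev/null -w '%{http_code}\n' \
  -H 'Range: bytes=0-1023' \
  https://example.com/storage/sprite-fright.mp4
```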
This is a web interface only so far, but I'm planning to add support for mobile
and desktop apps eventually. I've been building the interface with React Native
so adding mobile/desktop support shouldn't be too difficult, but I've been
finding that "write once, run everywhere" isn't always that simple. I ended up
having to add web-only code to support stuff like the video and PDF previews, so
I'll have to find replacements for some parts. Mobile and desktop apps natively
support more video and audio formats too, and with native code you usually have
the kind of performance to transcode video if needed.
The backend is written in Rust with `actix-web`, using async operations. It's
incredibly fast, and uses a tiny amount of resources (a basic measurement
suggests < 2 MB of memory used). I'm pretty excited about it!
After a few more features (namely being able to move files), I'm planning to put
together a demo to show this off live! The whole thing will be open source, but
I'm waiting until it's a bit more put together before I make the source public.
The source will go live at the same time as the demo.


@@ -0,0 +1,92 @@
---
title: Emacs and extensibility
date: 2015-10-06
---
Update: I've put the small Emacs tools I have written to a
[gist](https://gist.github.com/91c38ddde617b98ffbcb).
I have been using Emacs for some time, and I really love it. The
amount of power it has, and the customizability, is incredible. What
other editor allows you to connect to a server over SSH and edit files,
which is what I am doing to write this post? How many editors or IDEs
have support for so many languages?
<!--more-->
One thing I didn't know in the past, however, is how extensible
Emacs is. I mean, I do use a lot of packages, but I had never written
Elisp and I didn't know how hard or easy it would be. But after
starting to learn Clojure a bit, and feeling more comfortable with
lots of parentheses, I decided to extend Emacs a bit to make it fit
myself better.
The first thing I added is an "insert date" function. I use Emacs to
take notes during lessons (using Org-mode) and I start every note with
the date of the lesson. Sure, glancing at the date in the corner of my
screen and writing it down takes just a few seconds, but why not write
a command to do it for me? Here is what I came up with:
~~~commonlisp
(defun insert-current-date ()
  "Insert the current date in YYYY-MM-DD format."
  (interactive)
  (shell-command "date +'%Y-%m-%d'" t))
~~~
Now that was easy and convenient. And being able to write my first
piece of Elisp so easily was really fun, so I decided to tackle
something bigger.
It is not rare that I need to compile and run a single C file. Nothing
fancy, no libraries, no makefile, just a single C file to compile and
run. I searched around the internet for things like "Emacs compile and
run C", but couldn't find anything. I had been doing this by opening a
shell in Emacs and compiling/running the program, but again, why not
automate it?
The code that follows is not really good. "It works" is as good as it
gets, really. But considering that this is the first substantial Elisp
I have written, it is pretty impressive how far the language and Emacs,
which are both very helpful and powerful, can take you.
```commonlisp
(require 's)

(defun compile-run-buffer ()
  "Compile and run buffer."
  (interactive)
  (let* ((split-file-path (split-string buffer-file-name "/"))
         (file-name (car (last split-file-path)))
         (file-name-noext (car (split-string file-name "[.]")))
         (buffer-name (concat "compile-run: " file-name-noext))
         (buffer-name* (concat "*" buffer-name "*")))
    (make-comint buffer-name "gcc" nil "-Wall" "-Wextra" "-o" file-name-noext file-name)
    (switch-to-buffer-other-window buffer-name*)
    (set-process-sentinel (get-buffer-process (current-buffer))
                          (apply-partially
                           '(lambda (prog-name proc even)
                              (if (s-suffix? "finished\n" even)
                                  (progn
                                    (insert "Compilation successful.\n\n")
                                    (comint-exec (current-buffer) prog-name (concat "./" prog-name) nil nil))
                                (insert (concat "Compilation failed!\n" even))))
                           file-name-noext))))
```
Again, the code is not really good. I'm uploading it here right now
because I'm very excited that I wrote this. Even now I can think of
ways to improve it, for example moving the compiler and the flags into
variables so that they can be customized. I could also improve the
presentation, because the strings printed by this function, comint, and
the running program get mixed up. I'll update this blog post if I get
to updating the code.
If this is your first time hearing about Emacs, this post may look
very confusing. I don't do Emacs any justice here, so do check it out
somewhere like [Emacs rocks](http://emacsrocks.com/). On the other
hand, if you have been looking for functionality like this, hope this
helps. If you have any suggestions about the code, I'd love to hear
them, you can find my email on the "about me" page. Anyway, have a
good day!


@@ -0,0 +1,79 @@
---
title: Do kids not know computers now?
date: 2022-03-28
---
> This post is day 7 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
One discussion point I've seen around is that kids nowadays don't know how to
use computers. Okay, that's a bit of a strawman, but that is roughly the point of this article titled [File Not Found](https://www.theverge.com/22684730/students-file-folder-directory-structure-education-gen-z).
The gist of the article is that Gen-Z kids are too used to search interfaces.
That means they don't actually know where files are stored, or how they
are organized. They only know that they can access the files by searching for
them. The article talks about how professors ended up having to teach them how
to navigate directory structures and file extensions.
As the article claims, it seems to be related to how modern user interfaces are
designed. Our UIs nowadays are more focused around search capabilities: you just
type in a search bar and find what you need.
![A desktop, displaying a bar with the words "launch", followed by "fi". On the right side of the bar are program names "Firefox", "fish", "Profiler", "Frontend", "Patch Fixes", and "Qt File Manager". Firefox is highlighted.](/img/app-search-bar.png)
In some sense I do like this sort of interface. I use something like that when
launching applications, both on my Desktop and on my laptop! It's actually a
better interface compared to hunting for icons on your desktop. I use similar
interfaces in VSCode to switch between open editor tabs.
However, this is a complementary interface to hierarchy and organization. Going
back to the file systems example discussed in the article, being able to search
through your files and folders is useful. But it's not a replacement for
hierarchy. You can't just throw files into a folder and expect to always find
them accurately.
Let me give an example with Google Photos. I have been keeping all my photos on
Google Photos, and between migrating photos from old phones and ones I have
taken on new phones, I have over 8,000 photos. This is completely disorganized
of course, but Google Photos has a search functionality. It even uses AI to
recognize the items in the photos, which you can use in the search. A search for
"tree" brings up photos of trees, "cat" brings up cats, and you can even tag
people and pets and then search for their names. Very useful, right?
Well, it is sometimes. I recently had to remember what my wife's car license
plate is. A quick search for "license plate" on google photos and luckily, I had
taken a photo of her car that included the license plate in the frame. Success!
On the other hand, I was trying to find some photos from a particular gathering
with my friends. Searches for their names, names of the place, or stuff I know
are in the picture turned up with nothing. I eventually had to painstakingly
scroll through all photos to find the one I wanted.
This reminds me of 2 things. One is this article named [To Organize The World's
Information](https://dkb.io/post/organize-the-world-information) by
[@dkb868@twitter.com](https://nitter.net/dkb868). One thing I found interesting
in that article was that the concept of "the library" has been lost over the
last few decades as a way to organize information. They define the library as a
hierarchical, categorized directory of information. The article also talks about
other organizational methods, and is worth a read.
The other thing is the note taking software we're building at my workplace,
[Dendron](https://dendron.so/). One of the core tenets of Dendron is that
information is hierarchical. Something the founder Kevin recognized was that
other note taking tools make it easy to create new notes, but they don't
support hierarchical structures, which makes it hard to find those notes later.
I've experienced this too: when I used other note taking software (or sticky
notes!) I found that it was easy to just jot down a few notes, but they very
quickly got lost or became hard to find when I needed them. A hierarchical
organization makes it possible to actually find and reference the information later.
Requiring organization creates a barrier of entry to storing information, but
what good is storing information if you can't retrieve it later?
This seems to work pretty well with Dendron. Would it not work for other things?
Why not for taking photos? You of course want to be able to quickly snap a photo
so you can record a moment before it's gone, but perhaps you could be required
to organize your photos afterwards. Before modern cellphones and internet
connected cameras, you'd have to get your photos developed or transfer them off
an SD card: a step where you would have to (or have the opportunity to) organize
your photos. I wonder if cloud services could ask you to organize your photos
before syncing them as well.


@@ -0,0 +1,57 @@
---
title: Taking Backups with Duplicity
date: 2015-05-16
---
I wanted to start taking backups for some time, but I haven't had the time to do any research and set everything up. After reading another [horror story that was saved by backups](https://www.reddit.com/r/linuxmasterrace/comments/35ljcq/couple_of_days_ago_i_did_rm_rf_in_my_home/), I decided to start taking some backups.
<!--more-->
After doing some research on backup options, I decided on [duplicity](http://duplicity.nongnu.org/). The backups are compressed, encrypted, and incremental, both saving space and ensuring security. It supports both local and SSH targets (as well as many other protocols), so it has everything I need.
I first took a backup onto my external hard drive, then onto my VPS. The main problem I encountered was that duplicity uses [paramiko](https://github.com/paramiko/paramiko) for SSH, but it wasn't able to negotiate a key exchange algorithm with my VPS. Luckily, duplicity also supports [pexpect](http://pexpect.sourceforge.net/pexpect.html), which uses OpenSSH. If you encounter the same problem, you just need to tell duplicity to use the pexpect backend by prepending your URL with `pexpect+`, like `pexpect+ssh://example.com`.
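For example, a backup command using that workaround looks roughly like this (the host and paths are made up):
```bash
# back up the home directory over SSH, going through the OpenSSH-based pexpect backend
duplicity /home/kaan pexpect+ssh://kaan@example.com//backup
```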
Duplicity doesn't seem to have any sort of configuration file of its own, so I ended up writing a small bash script to serve as a sort of configuration, and also to keep me from running duplicity with the wrong arguments. I kept forgetting to add the extra slash to `file://`, causing duplicity to back up my home directory into my home directory! :D
If anyone is interested, here's the script:
```bash
#!/bin/bash
if [[ $(id -u) != "0" ]]; then
    read -p "Backup should be run as root! Continue? [y/N]" yn
    case $yn in
        [Yy]*) ;;  # continue with the backup
        *) exit;;
    esac
fi

if [[ $1 = file://* ]]; then
    echo "Doing local backup."
    ARGS="--no-encryption"
    if [[ $1 = file:///* ]]; then
        URL=$1
    else
        echo "Use absolute paths for backup."
        exit 1
    fi
elif [[ $1 = scp* ]]; then
    echo "Doing SSH backup."
    ARGS="--ssh-askpass"
    URL="pexpect+$1"
else
    echo "Unknown URL, use scp:// or file://"
    exit 1
fi

if [[ -n "$1" ]]; then
    duplicity $ARGS --exclude-filelist /home/kaan/.config/duplicity-files /home/kaan "$URL/backup"
else
    echo "Please specify a location to backup into."
    exit 1
fi
```
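For reference, here's roughly how the script gets invoked, assuming it's saved as `backup.sh` (the mount point and host are made up):
```bash
./backup.sh file:///mnt/external-drive   # local backup, absolute path required
./backup.sh scp://kaan@example.com/      # SSH backup through the pexpect backend
```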


@@ -0,0 +1,238 @@
---
title: Emacs as an operating system
date: 2016-04-14
modified: 2016-05-29
---
Emacs is sometimes jokingly called a good operating system with a bad
text editor. Over the last year, I found myself using more and more of
Emacs, so I decided to try out how much of an operating system it
is. Of course, operating system here is referring to the programs that
the user interacts with, although I would love to try out some sort of
Emacs-based kernel.
<!--more-->
# Emacs as a terminal emulator / multiplexer
Terminals are all about text, and Emacs is all about text as well. Not
only that, but Emacs is also very good at running other processes and
interacting with them. It is no surprise, I think, that Emacs works
well as a terminal emulator.
Emacs comes out of the box with `shell` and `term`. Both of these
commands run the shell of your choice, and give you a buffer to
interact with it. Shell gives you a more emacs-y experience, while
term overrides all default keymaps to give you a full terminal
experience.
![A terminal interface, with the outputs of the commands "ls" and "git status" displayed.](/img/emacs-terminal.png)
To use emacs as a full terminal, you can bind these to a key in your
window manager. I'm using i3, and my keybinding looks like this:
```
bindsym $mod+Shift+Return exec --no-startup-id emacs --eval "(shell)"
```
You can also create a desktop file to get a launcher entry for this in a
desktop environment. Try putting the following text in a file at
`~/.local/share/applications/emacs-terminal.desktop`:
```
[Desktop Entry]
Name=Emacs Terminal
GenericName=Terminal Emulator
Comment=Emacs as a terminal emulator.
Exec=emacs --eval '(shell)'
Icon=emacs
Type=Application
Terminal=false
StartupWMClass=Emacs
```
If you want to use term instead, replace `(shell)` above with `(term "/usr/bin/bash")`.
A very useful feature of terminal multiplexers is the ability to leave
the shell running, even after the terminal is closed, or the SSH
connection has dropped if you are connecting over that. Emacs can also
achieve this with its server-client mode. To use that, start Emacs
with `emacs --daemon`, and then create a terminal by running
`emacsclient -c --eval '(shell)'`. Even after you close emacsclient,
since Emacs itself is still running, you can run the same command
again to get back to your shell.
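In other words, the whole workflow is just two commands:
```bash
# start the Emacs daemon once (for example from your session startup)
emacs --daemon
# then every new "terminal" is a client frame running shell; closing the frame
# leaves the shell running inside the daemon
emacsclient -c --eval '(shell)'
```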
One caveat is that if there is a terminal/shell already running, Emacs
will automatically open that whenever you try opening a new one. This
can be a problem if you are using Emacs in server-client mode, or want
to have multiple terminals in the same window. In that case, you can
either do `M-x rename-uniquely` to change the name of the existing
terminal, which will make Emacs create a new one next time, or you can
add these hooks to your `init.el` to always get that behaviour:
```lisp
(add-hook 'shell-mode-hook 'rename-uniquely)
(add-hook 'term-mode-hook 'rename-uniquely)
```
# Emacs as a shell
Of course, it is not enough that Emacs works as a terminal
emulator. Why not use Emacs as a shell directly, instead of bash/zsh?
Emacs has you covered for that too. You can use eshell, which is a
shell implementation, completely written in Emacs Lisp. All you need
to do is press `M-x eshell`.
![An Emacs window, split in two. Left side shows a command line with the command "cat README.rst >>> #<buffer *scratch*>". Right side shows the emacs scratch buffer, with the contents of the readme file displayed.](/img/eshell.png)
The upside is that eshell can evaluate and expand lisp expressions, as
well as redirect output to Emacs buffers. The downside, however, is
that eshell is not feature complete. It lacks some features such
as input redirection, and the documentation notes that it is
inefficient at piping output between programs.
If you want to use eshell instead of shell or term, you can replace
`shell` in the examples of terminal emulator section with `eshell`.
# Emacs as a mail client
[Zawinski's Law](http://www.catb.org/~esr/jargon/html/Z/Zawinskis-Law.html):
Every program attempts to expand until it can read mail. Of course, it
would be disappointing for Emacs to not handle mail as well.
Emacs already ships with some mail capability. To get a full
experience however, I'd recommend using
[mu4e](http://www.djcbsoftware.nl/code/mu/mu4e.html) (mu for emacs). I
have personally set up [OfflineIMAP](http://www.offlineimap.org/) to
retrieve my emails, and mu4e gives me a nice interface on top of that.
![An emacs window, displaying several emails on top with titles like "Announcing Docker Cloud", or "Order #29659 shipped". An email titles "Add 'url' option to 'list' command' is selected, and the bottom half of the window displays the contents of this email. Email display includes "From" and "To" fields, "Date", "Flags", and the body of the email.](/img/mu4e.png)
I'm not going to talk about the configuration of these programs, I'd
recommend checking out their documentation. Before ending this
section, I also want to mention
[mu4e-alert](https://github.com/iqbalansari/mu4e-alert) though.
# Emacs as a feed reader (RSS/Atom)
Emacs handles feeds very well too. The packages I'm using here are
[Elfeed](https://github.com/skeeto/elfeed) and
[Elfeed goodies](https://github.com/algernon/elfeed-goodies). Emacs
can even show images in the feeds, so it covers everything I need from
a feed reader.
![A window, with a list on the left displaying entries from "xkcd.com", "Sandra and Woo", and "The Codeless Code". An entry titled "Pipelines" is selected, and the right side of the window displays the contents of that XKCD.](/img/elfeed.png)
# Emacs as a file manager
Why use a different program to manage your files when you can use
Emacs? Emacs ships with dired, as well as image-dired. This gives you
a file browser, with optional image thumbnail support.
# Emacs as a document viewer
Want to read a pdf? Need a program to do a presentation? Again, Emacs.
![An emacs window displaying a PDF file, titled "Clojure for the Brave and True.pdf". The page includes some clojure code, and talks about Emacs.](/img/docview.png)
Emacs comes with
[DocView](https://www.gnu.org/software/emacs/manual/html_node/emacs/Document-View.html)
which has support for PDF, OpenDocument and Microsoft Office files. It
works surprisingly well.
Also, [PDF Tools](https://github.com/politza/pdf-tools) brings even
more PDF viewing capabilities to Emacs, including annotations, text
search and outline. After installing PDF Tools, Emacs has become my
primary choice for reading PDF files.
# Emacs as a browser
Emacs comes out of box with
[eww](https://www.gnu.org/software/emacs/manual/html_node/eww/index.html#Top),
a text-based web browser with support for images as well.
![An Emacs window, displaying the Wikipedia web page for Emacs.](/img/eww.png)
Honestly, I don't think I'll be using Emacs to browse the web. But
still, it is nice that the functionality is there.
# Emacs as a music player
Emacs can also act as a music player thanks to
[EMMS](https://www.gnu.org/software/emms/), Emacs MultiMedia
System. If you are wondering, it doesn't play the music by itself but
instead uses other players like vlc or mpd.
It has support for playlists, and can show thumbnails as well. As for
formats, it supports whatever the players it uses support, which
means you can use basically any file type.
# Emacs as an IRC client
I don't use IRC a lot, but Emacs comes out of the box with support for
that as well thanks to
[ERC](https://www.emacswiki.org/emacs?action=browse;oldid=EmacsIrcClient;id=ERC).
![An Emacs window, displaying an IRC chat for "#emacs@freenode".](/img/erc.png)
# Emacs as a text editor
Finally, Emacs also can work well as a text editor.
Emacs is a pretty fine text editor out of the box, but I want to
mention some packages here.
First,
[multiple cursors](https://github.com/magnars/multiple-cursors.el). Multiple
cursors mode allows you to edit text at multiple places at the same
time.
I also want to mention
[undo-tree](http://www.dr-qubit.org/emacs.php#undo-tree). It acts like
a mini revision control system, allowing you to undo and redo without
ever losing any text.
Another great mode is
[iy-go-to-char](https://github.com/doitian/iy-go-to-char). It allows
you to quickly jump around by going to the next/previous occurrences of
a character. It is very useful when you are trying to move around in a
line.
[Ace Jump Mode](https://github.com/winterTTr/ace-jump-mode/) allows
you to jump around the visible buffers. It can jump around based on
initial characters of words, or jump to specific lines. It can also
jump from one buffer to another, which is very useful when you have
several buffers open in your screen.
![An emacs window, with Python code displayed. Several locations within the code are highlighted with different letters.](/img/ace-jump-mode.png)
Finally, I want to mention [ag.el](https://github.com/Wilfred/ag.el),
which is an Emacs frontend for the silver searcher. If you don't know
about ag, it is a replacement for grep that recursively searches
directories, has some special handling for projects, and is very
fast.
# Emacs as an IDE
People sometimes compare Emacs to IDE's and complain that a text
editor such as Emacs doesn't have enough features. What they are
forgetting, of course, is that Emacs is an operating system, and we
can have an IDE in it as well.
There are different packages for every language, so I'll only mention
language-agnostic ones here.
For interacting with git, [magit](http://magit.vc/) is a wonderful
interface.
![An emacs window, displaying the git log for a repository at the top, and the shortcuts for git commands such as "Apply", "Stage", "Unstage" below.](/img/magit.png)
For auto-completion, [Company mode](https://company-mode.github.io/)
works wonders. I rely heavily on completion while writing code, and
company mode has support for anything I tried writing.
If you like having your code checked as you type,
[flycheck](https://www.flycheck.org/) has you covered. It has support
for many tools and languages.
![A C code file, with the letters "st" are written. A pop-up below the cursor displays options like "strcat", "strchr", "strcmp" and more.](/img/company-flycheck.png)


@@ -0,0 +1,98 @@
---
title: Getting Deus Ex GOTY Edition running on Linux
date: 2022-03-12
---
I've been struggling with this for a few hours, so I might as well document how
I did it.
I have a particular setup, which ended up causing issues. Most important are
that I'm using Sway, a tiling Wayland compositor, and a flatpak install of
Steam.
## Mouse doesn't move when the game is launched
It looks like there's a problem with the game window grabbing the cursor on my
setup, so moving the mouse doesn't move the cursor in the game and if you move
it too much to the side it takes you out of the game window.
The solution to this is using Gamescope, which is a nested Wayland compositor
that makes the window inside it play nice with your actual compositor.
Because I'm using the flatpak install of Steam, I needed to install the
[flatpak version of gamescope](https://github.com/flathub/com.valvesoftware.Steam.Utility.gamescope).
One catch here is that for me, this wouldn't work if I also had the flatpak MangoHud installed.
The only solution I could come up with right now was to uninstall MangoHud.
```bash
flatpak remove org.freedesktop.Platform.VulkanLayer.MangoHud # if you have it installed
flatpak install com.valvesoftware.Steam.Utility.gamescope
```
Then, right click on the game and select properties, then in launch options type
`gamescope -f -- %command%`. This will launch the game inside gamescope, and the
cursor should move inside the game now.
## The game is too dark to see anything
It looks like the game relied on some old DirectX or OpenGL features or
something, because once you do launch into the game, everything is extremely
dark and hard to see. At first I was wondering how anyone could play the game
like this, but it turns out that's not how the game is supposed to look!
I finally managed to solve this by following the installer steps for the
[Deus Ex CD on Lutris](https://lutris.net/games/install/948/view). Yeah,
roundabout way to solve it, but it worked.
First download the updated D3D9 and OpenGL renderers from the page, and extract
them into the `System` folder inside the game.
```bash
cd "$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/common/Deus Ex/System"
wget https://lutris.net/files/games/deus-ex/dxd3d9r13.zip
wget https://lutris.net/files/games/deus-ex/dxglr20.zip
unzip dxd3d9r13.zip
unzip dxglr20.zip
```
Next, download and install the `1112fm` patch.
```bash
cd "$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/common/Deus Ex/System"
wget https://lutris.net/files/games/deus-ex/DeusExMPPatch1112fm.exe
env WINEPREFIX="$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/compatdata/6910/pfx/" wine DeusExMPPatch1112fm.exe
```
Follow the steps of the installer. It should automatically find where the game
is installed. Once the install is done, launch the game, then head into the
settings and pick "Display Settings", then "Rendering Device". In the renderer
selection window, pick "Show all devices", and then select "Direct3D9 Support".
![A window with the title "Deus Ex" in a stylized font. Below it lists several options such as "Direct3D Support", "Direct3D9 Support", and "OpenGL Support". Direct3D9 is selected. Below are two radio buttons, with the one titled "Show all devices" selected.](/img/deus-ex-render-settings.png)
Launch back into the game, head into the display settings again, pick your
resolution, and restart the game. Then head into the display settings yet again,
this time change the color depth to 32 bit. Restart once more. Yes, you do have
to do them separately or the game doesn't save the color depth change for some
reason. Finally, you can start playing!
![A game screenshot displaying the Statue of Liberty in front of a cityscape. Closer to the player are wooden docks. The image is split down the middle, left side says "before" and is very dark, the right side says "after" and is much lighter.](/img/deus-ex-renderer-comparison.png)
## Other small issues
Here are a few more issues you might hit during this whole process:
> My cursor moves too fast!
You need to turn down the cursor speed. My mouse has buttons to adjust the speed on the fly, so I use those to turn down the speed.
> After changing resolution, I can't move my cursor!
Use the keyboard shortcuts (arrow keys and enter) to exit the game. It should work again when you restart.
> The cursor doesn't move when I open the game, even with gamescope!
I'm not fully sure why or how this happens, but a few things I found useful:
- When the game is launching, and it's showing the animation of the studio logo, don't click! Press escape to bring up the menu instead.
- Press escape to bring the menu up, then hit escape again to dismiss it. It sometimes starts working after that.
- Use the keyboard to exit the game and restart. It always works the next time for me.


@@ -0,0 +1,166 @@
---
title: "Managing my recipes with Dendron"
date: 2022-04-04
---
> This post is day 10 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I like to cook at home, but for a long time I never wrote down or saved any of
my recipes. Because of that I would occasionally completely forget how to make
something. My mom and my grandmom write down their recipes in notebooks, but I
want something more powerful and resilient than pen and paper.
At first I tried writing down my recipes in Google Keep, but found it a bit
tedious. That's where Dendron came in. Dendron is a knowledge management and
note taking tool. It comes with features that enhance the writing experience,
but more importantly it has a lot of features that enhance the discoverability
of what you wrote.
For reference, I have the [repository for the recipes](https://gitea.bgenc.net/kaan/recipes) available publicly.
## Setup
[Dendron](https://marketplace.visualstudio.com/items?itemName=dendron.dendron)
is an extension for Visual Studio Code, so you'll need to install both. There's
a great tutorial to go through, but I'm already experienced with it so I went
ahead and created a new workspace that I called "recipes".
Next, I created a template and a schema to help me write new recipes. The
template is just a regular Dendron note, which I named `templates.recipe`.
```md
* Servings:
* Calories:
* Protein:
* Fiber:
## Ingredients
## Instructions
## Notes
```
This template immediately gives me the basic structure of a recipe. I have the
ingredients and instructions, and then I have a place to put any additional
notes about the recipe (for example, things I want to change next time I cook
it, or how to serve it best). I also have a section at the top to fill out some
nutritional information. I use the mobile app Cronometer to calculate that,
although most of the time I don't bother because it's just a nice-to-have that I
don't really need.
Next, here's my schema.
```yml
version: 1
imports: []
schemas:
  - id: recipes
    title: recipes
    parent: root
    children:
      - id: bowls
        title: bowls
        namespace: true
        template: templates.recipe
      - id: baked
        title: baked
        namespace: true
        template: templates.recipe
      - id: dessert
        title: dessert
        namespace: true
        template: templates.recipe
      - id: misc
        title: misc
        namespace: true
        template: templates.recipe
      - id: soup
        title: soup
        namespace: true
        template: templates.recipe
```
The schema helps me keep my recipes organized (and also automatically applies
the template note). You can see that I have my recipes organized under `bowls`
for stuff like rice and pasta dishes, `baked` for bread, pies and anything else
where you bake everything, `dessert` and `soup` which are self descriptive, and
`misc` which holds anything else like salad toppings.
## Publishing
I publish my [recipes online](https://bgenc.net/recipes/), which makes it very
easy to pull up a recipe when I'm cooking or at the grocery store.
I use a self-hosted setup, so all I have to do is just run the Dendron CLI to
build the site. To automate this process, I set up some VSCode tasks to build
and publish the site.
```json
{
  // See https://go.microsoft.com/fwlink/?LinkId=733558
  // for the documentation about the tasks.json format
  "version": "2.0.0",
  "tasks": [
    {
      "label": "build site",
      "type": "shell",
      "command": "dendron publish export",
      "options": {
        "cwd": "${workspaceFolder}"
      }
    },
    {
      "label": "publish site",
      "type": "shell",
      "command": "rsync -av .next/out/ /var/www/recipes/",
      "options": {
        "cwd": "${workspaceFolder}"
      },
      "dependsOn": ["build site"],
      "problemMatcher": []
    },
  ]
}
```
I think before running these tasks, you first have to run `dendron publish init && dendron publish build`.
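Putting the pieces together, a first publish from the terminal looks roughly like this (the output path matches what the tasks above expect):
```bash
dendron publish init      # one-time setup for the workspace
dendron publish export    # builds the static site into .next/out
rsync -av .next/out/ /var/www/recipes/
```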
The first task builds the site using Dendron, and then the second task copies
the generated static website to where I have it published. I'm running a web
server on my desktop so this is just a folder, but `rsync` can also copy things
over SSH if you host your site on a different machine. There are also
[tutorials](https://wiki.dendron.so/notes/x0geoUlKJzmIs4vlmwLn3/) for things
like Github pages or Netlify.
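For example, publishing to a remote machine would only change the rsync destination, something like this (the hostname and path are made up):
```bash
rsync -av .next/out/ kaan@example.com:/var/www/recipes/
```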
Because I'm publishing under a subfolder (`.../recipes`), I also had to set
`assetsPrefix` in my `dendron.yml` configuration file.
```yml
publishing:
  assetsPrefix: "/recipes"
...
```
## Bonus: What do I cook this week?
My wife and I go shopping once a week, so every week we need to decide what
we're going to eat this week. Sometimes it can be hard to pick something to eat
though! Luckily, Dendron comes with a command `Dendron: Random Note` which shows
you a random note. You can even configure it to only show some notes, which I
used so it will only show me recipes.
```yml
commands:
  randomNote:
    include:
      - "recipes"
```
Now when I'm having trouble picking, I can just use this command and get
something to cook!


@@ -0,0 +1,60 @@
---
title: Mass batch processing on the CLI
date: 2022-03-19
---
> This post is day 4 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
Some time ago, I needed to process a lot of video files with vlc. This is
usually pretty easy to do, `for file in *.mp4 ; do ffmpeg ... ; done` is about
all you need in most cases. However, sometimes the files you are trying to
process are in different folders. And sometimes you want to process some files
in a folder but not others. That's the exact situation I was in, and I was
wondering if I needed to find some graphical application with batch processing
capabilities so I can queue up all the processing I need.
After a bit of thinking though, I realized I could do this very easily with a
simple shell script! That shell script lives in my [mark-list](https://github.com/SeriousBug/mark-list)
repository.
The idea is simple, you use the command to mark a bunch of files. Every file you
mark is saved into a file for later use.
```bash
$ mark-list my-video.mp4 # Choose a file
Marked 1 file.
$ mark-list *.webm # Choose many files
Marked 3 files.
$ cd Downloads
$ mark-list last.mpg # You can go to other directories and keep marking
```
You can mark a single file, or a bunch of files, or even navigate to other
directories and mark files there.
Once you are done marking, you can recall what you marked with the same tool:
```bash
$ mark-list --list
/home/kaan/my-video.mp4
/home/kaan/part-1.webm
/home/kaan/part-2.webm
/home/kaan/part-3.webm
/home/kaan/Downloads/last.mpg
```
You can then use this in the command line. For example, I was trying to convert everything to `mkv` files.
```bash
for file in `mark-list --list` ; do ffmpeg -i "${file}" "${file}.mkv" ; done
```
It works! After you are done with it, you then need to clear out your marks:
```
mark-list --clear
```
Hopefully this will be useful for someone else as well. It does make it a lot
easier to just queue up a lot of videos, and convert all of them overnight.

58
content/posts/mpv.md Normal file

@@ -0,0 +1,58 @@
---
title: Motion Interpolation, 24 FPS to 60 FPS with mpv, VapourSynth and MVTools
date: 2015-07-18
modified: 2015-07-20
---
Watching videos at 60 FPS is great. It makes the video significantly smoother and much more enjoyable. Sadly, lots of movies and TV shows are still at 24 FPS. However, I recently discovered that it is actually possible to interpolate the extra frames by using motion interpolation, and convert a video from 24 FPS to 60 FPS in real time. While it is far from perfect, I think the visual artifacts are a reasonable tradeoff for high framerate.
<!--more-->
Firstly, what we need is mpv with VapourSynth enabled, and the MVTools plugin for VapourSynth. VapourSynth must be enabled while compiling mpv. I adopted an AUR package [mpv-vapoursynth](https://aur4.archlinux.org/packages/mpv-vapoursynth/) which you can use if you are on Arch. Otherwise, all you need to do is use the `--enable-vapoursynth` flag when doing `./waf configure`. They explain the compilation on their [repository](https://github.com/mpv-player/mpv), so look there if you are compiling yourself.
After that, we need MVTools plugin for VapourSynth. This is available on Arch via [vapoursynth-plugin-mvtools](https://www.archlinux.org/packages/community/x86_64/vapoursynth-plugin-mvtools/), otherwise you can find their repository [here](https://github.com/dubhater/vapoursynth-mvtools). There is also a [PPA for Ubuntu](https://launchpad.net/~djcj/+archive/ubuntu/vapoursynth) where you can find `vapoursynth-extra-plugins`, but I haven't used it myself so I can't comment on it.
After both of these are enabled, we need a script to use MVTools from VapourSynth. There is one written by Niklas Haas, which you can find here as [mvtools.vpy](https://github.com/haasn/gentoo-conf/blob/master/home/nand/.mpv/filters/mvtools.vpy). Personally, I tweaked the block sizes and precision to my liking, as well as removing the resolution limit he added. I'll put the modified version here:
```python
# vim: set ft=python:
import vapoursynth as vs
core = vs.get_core()
clip = video_in
dst_fps = display_fps
# Interpolating to fps higher than 60 is too CPU-expensive, smoothmotion can handle the rest.
while (dst_fps > 60):
    dst_fps /= 2
# Skip interpolation for 60 Hz content
if not (container_fps > 59):
    src_fps_num = int(container_fps * 1e8)
    src_fps_den = int(1e8)
    dst_fps_num = int(dst_fps * 1e4)
    dst_fps_den = int(1e4)
    # Needed because clip FPS is missing
    clip = core.std.AssumeFPS(clip, fpsnum = src_fps_num, fpsden = src_fps_den)
    print("Reflowing from ", src_fps_num/src_fps_den, " fps to ", dst_fps_num/dst_fps_den, " fps.")
    sup = core.mv.Super(clip, pel=1, hpad=8, vpad=8)
    bvec = core.mv.Analyse(sup, blksize=8, isb=True , chroma=True, search=3, searchparam=1)
    fvec = core.mv.Analyse(sup, blksize=8, isb=False, chroma=True, search=3, searchparam=1)
    clip = core.mv.BlockFPS(clip, sup, bvec, fvec, num=dst_fps_num, den=dst_fps_den, mode=3, thscd2=12)
clip.set_output()
```
At this point, you should be able to try this out as suggested in the script. To set this up more permanently, I'd suggest placing this script as `~/.config/mpv/mvtools.vpy`, and also writing the following as `~/.config/mpv/mpv.conf`:
```
hwdec=no
vf=vapoursynth=~/.config/mpv/mvtools.vpy
```
Now, whenever you open mpv, it will always use motion interpolation.
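If you want to try it on a single video first without touching your config, you should also be able to pass the filter on the command line, something like this (check the mpv manual for the exact syntax on your version):

```bash
mpv --vf=vapoursynth="$HOME/.config/mpv/mvtools.vpy" some-video.mkv
```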
The result is fairly good. I noticed some significant artifacts while watching anime, but it works well with movies. I'm guessing that it is harder to track the motion in animations since they are generally exaggerated.
One thing to keep in mind, however, is performance. With `pel=2`, viewing a 1080p video results in around 90% CPU usage across all cores and 1.6 GB of RAM on my Intel i7 4700MQ. With `pel=1`, CPU usage goes down to about 60% per core. This process is very heavy on the processor, and you may have trouble unless you have a fast CPU.

View File

@ -0,0 +1,44 @@
---
title: My response to Aurynn Shaw's "Contempt Culture" post
date: 2022-03-27
---
> This post is day 6 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I recently came across [Aurynn Shaw's article on "Contempt Culture"](https://blog.aurynn.com/2015/12/16-contempt-culture/).
I'm a bit late to the party, but I wanted to talk about this too.
Aurynn's article talks about how some programming languages are considered
inferior, and programmers using these languages are considered less competent.
It's a good article, and you should take a look at it if you haven't.
## My thoughts
One thing I've come to realize over the years is that there are really no "bad
programming languages". Ignoring esolangs like brainfuck which are not really
meant to be used for anything serious, most programming languages are designed
to fit a niche. I'm using the term like it's used in ecology: every programming
language has a place in the ecosystem of technology and programming.
PHP is bad? PHP certainly has its drawbacks, but it also has its advantages.
"Drop these files into a folder and it works" is an amazing way to get started
programming. It's also a great way to inject a bit of dynamic content into
otherwise static pages. In fact, it's a simpler and more straightforward solution
than building a REST API and a web app where you have to re-invent server side
rendering just to get back to where PHP already was!
That's not to say PHP is perfect or the best language to use. It's a language I
personally don't like. But that doesn't make it a bad or "stupid" programming
language. At worst it's a programming language that doesn't fit my needs. If I
extrapolate that and say that PHP is a bad language, that would instead show my
ego. Do I really think I'm so great that anything I don't like is just
immediately bad? Something Aurynn said resonates with me here:
> It didn't matter that it was (and remains) difficult to read, it was that we
> were better for using it.
I just want to conclude this with one thing: next time you think a programming
language or tool or whatever is bad, think to yourself whether that's because it
doesn't feel cool or because you saw others making fun of it, or because you
actually evaluated the pros and cons and came up with a calculated decision.

18
content/posts/pass.md Normal file
View File

@ -0,0 +1,18 @@
---
title: Switching to pass
date: 2015-03-30
---
For some time, I used LastPass to store my passwords. While LastPass works well, it doesn't fit into the keyboard-driven setup I have. I had been looking into alternatives for a while; I looked into KeePassX, but just like LastPass, it doesn't give me any way to set up keyboard shortcuts. Then I recently came across [pass](http://www.passwordstore.org/), and it provides everything I want.
<!--more-->
Pass uses GPG keys to encrypt the passwords, and git to keep revisions and backups. It integrates well with the shell, and there is a dmenu script, a Firefox plugin and an Android app. All the passwords are just GPG encrypted files stored in plain folders, so you don't need anything special to work with them.
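Day-to-day usage looks roughly like this (the key ID and entry names here are just examples):

```bash
# Create the store with your GPG key, and put it under git
pass init "SeriousBug@Gmail.com"
pass git init
# Add, list, and copy entries
pass insert archlinux.org/SeriousBug
pass ls
pass -c archlinux.org/SeriousBug  # copies the password to the clipboard for 45 seconds
```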
![A terminal window with the command "pass ls archlinux.org". The output lists "SeriousBug@Gmail.com" and "SeriousBug". Above the terminal is a bar, with "archlin" typed on the left, and the option "archlinux.org/SeriousBug@Gmail.com" displayed on the right.](/img/passmenu.png)
So first, I needed to migrate my passwords from LastPass to pass. The website lists some scripts for migration, but sadly I missed that when I first looked at the page. So I decided to write a [python script to handle the migration](https://gist.github.com/SeriousBug/e9f33873d10ad944cbe6) myself. It inserts all passwords in `domain/username` format, and if there is any extra data written, it is added after the password as well. Secure notes are placed into their own folder, and any "Generated Password for ..." entries are skipped. If you're migrating from LastPass to pass, feel free to give it a try. If you are taking an export from their website however, do make sure that there is no whitespace before and after the csv.
![An Android phone screenshot. A search bar at the top displays "archlin" typed in, and below the search bar the options "archlinux.org" and "wiki.archlinux.org" are listed.](/img/password_store.png)
I certainly recommend trying out pass. It works very well, and it fits in with the unix philosophy.

188
content/posts/raid.md Normal file
View File

@ -0,0 +1,188 @@
---
title: My local data storage setup
date: 2022-03-10
---
Recently, I've needed a bit more storage. In the past I've relied on Google
Drive, but if you need a lot of space Google Drive becomes prohibitively
expensive. The largest option available, 2 TB, runs you $100 a year at the time
of writing. While Google Drive comes with a lot of features, it also comes with
a lot of privacy concerns, and I need more than 2 TB anyway. Another option
would be Backblaze B2 or AWS S3, but the cost is even higher. Just to set a
point of comparison, 16 TB of storage would cost $960 a year with B2 and a
whopping $4000 a year with S3.
Luckily, the cost of storage per GB has been coming down steadily.
Large hard drives are cheap to come by, and while these drives are not
incredibly fast, they are much faster than the speed of my internet connection.
Hard drives it is then!
While I could get a very large hard drive, it's generally a better idea to get
multiple smaller hard drives. That's because these drives often offer a better
$/GB rate, but also because it allows us to mitigate the risk of data loss. So
after a bit of search, I found these "Seagate Barracuda Compute 4TB" drives. You
can find them on [Amazon](https://www.amazon.com/gp/product/B07D9C7SQH/) or
[BestBuy](https://www.bestbuy.com/site/seagate-barracuda-4tb-internal-sata-hard-drive-for-desktops/6387158.p?skuId=6387158).
These hard drives are available for $70 each at the time I'm writing this, and I bought 6 of them.
This gets me to around $420, plus a bit more for SATA cables.
Looking at [Backblaze Hard Drive Stats](https://www.backblaze.com/blog/backblaze-drive-stats-for-2021/),
I think it's fair to assume these drives will last at least 5 years.
Dividing the cost by the expected lifetime, that gets me $84 per year, far below what the cloud storage costs!
It's of course not as reliable, and it requires maintenance on my end, but
the difference in price is just too far to ignore.
## Setup
I decided to set this all up inside my desktop computer. I have a large case so
fitting all the hard drives in is not a big problem, and my motherboard does
support 6 SATA drives (in addition to the NVMe that I'm booting off of). I also
run Linux on my desktop computer, so I've got all the required software
available.
For the software side of things, I decided to go with `mdadm` and `ext4`. There
are also other options available like ZFS (not included in the linux kernel) or
btrfs (raid-5 and raid-6 are known to be unreliable), but this was the setup I
found the most comfortable and easiest to understand. How it works is that
`mdadm` combines the disks and presents them as a single block device, then `ext4`
formats and uses that block device the same way you would use any regular
drive.
### Steps
I was originally planning to write the steps I followed here, but in truth I
just followed whatever the [ArchLinux wiki](https://wiki.archlinux.org/title/RAID#Installation)
told me. So I'll just recommend you follow that as well.
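For a rough idea of what the wiki walks you through, the core of it boils down to something like this (the device names, RAID level, and array name here are placeholders, not my exact commands):

```bash
# Combine the disks into a single array (here: RAID 6 across six partitions)
mdadm --create /dev/md/array --level=6 --raid-devices=6 \
    /dev/sda1 /dev/sdb1 /dev/sdc1 /dev/sdd1 /dev/sde1 /dev/sdf1
# Format the resulting block device and mount it like any other drive
mkfs.ext4 /dev/md/array
mount /dev/md/array /mnt/storage
```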
The only thing I'll warn you is that the wiki doesn't clearly note just how long
this process takes. It took almost a week for the array to build, and until the
build is complete the array runs at a reduced performance. Be patient, and just
give it some time to finish. As a reminder, you can always check the build
status with `cat /proc/mdstat`.
## Preventative maintenance
Hard drives have a tendency to fail, and because RAID arrays are resilient, the
failures can go unnoticed. You **need** to regularly check that the array is
okay. Unfortunately, while there are quite a few resources online on how to set
up RAID, very few of them actually talk about how to set up scrubs (full scans
to look for errors) and error monitoring.
For my setup, I decided to use systemd to check and report issues. For this,
I set up 2 timers: one that checks if there are any reported errors on the
RAID array, and another that scrubs the RAID array. A systemd timer comes in 2 parts,
a service file and a timer file, so here are all the files.
- `array-scrub.service`
```toml
[Unit]
Description=Scrub the disk array
After=multi-user.target
OnFailure=report-failure-email@array-scrub.service
[Service]
Type=oneshot
User=root
ExecStart=bash -c '/usr/bin/echo check > /sys/block/md127/md/sync_action'
[Install]
WantedBy=multi-user.target
```
- `array-scrub.timer`
```toml
[Unit]
Description=Periodically scrub the array.
[Timer]
OnCalendar=Sat *-*-* 05:00:00
[Install]
WantedBy=timers.target
```
The timer above is the scrub operation; it tells RAID to scan the drives for
errors. In my experience it takes up to a couple of days for the scan to
complete, so I run it once a week.
- `array-report.service`
```toml
[Unit]
Description=Check raid array errors that were found during a scrub or normal operation and report them.
After=multi-user.target
OnFailure=report-failure-email@array-report.service
[Service]
Type=oneshot
ExecStart=/usr/bin/mdadm -D /dev/md127
[Install]
WantedBy=multi-user.target
```
- `array-report.timer`
```toml
[Unit]
Description=Periodically report any issues in the array.
[Timer]
OnCalendar=daily
[Install]
WantedBy=timers.target
```
And this timer above checks the RAID array status to see if there were any
errors found. This timer runs much more often (once a day), because it's
instant, and also because RAID can find errors during regular operation even
when you are not actively running a scan.
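One detail that's easy to miss: the timers don't do anything until they're enabled. With the files above in place, that looks something like this:

```bash
# Enable and start both timers so they survive reboots
systemctl enable --now array-scrub.timer array-report.timer
# Check when they will fire next
systemctl list-timers 'array-*'
```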
### Error reporting
Another important thing here is this line in the service file:
```toml
OnFailure=report-failure-email@array-report.service
```
The automated checks are of no use if I don't know when something actually
fails. Luckily, systemd can run a service when another service fails, so I'm
using this to report failures to myself. Here's what the service file looks like:
- `report-failure-email@.service`
```toml
[Unit]
Description=status email for %i to user
[Service]
Type=oneshot
ExecStart=/usr/local/bin/systemd-email address %i
User=root
```
- `/usr/local/bin/systemd-email`
```sh
#!/bin/sh
/usr/bin/sendmail -t <<ERRMAIL
To: homelab@bgenc.net
From: systemd <root@$HOSTNAME>
Subject: Failure on $2
Content-Transfer-Encoding: 8bit
Content-Type: text/plain; charset=UTF-8
$(systemctl status --lines 100 --no-pager "$2")
ERRMAIL
```
The service just runs this shell script, which is a thin wrapper around
sendmail. The `%i` in the service file is whatever comes after the `@` when the
service is used: the `OnFailure` hook puts `array-report` after the `@`, which
gets passed to the email service, which in turn passes it on to the mail
script.
To send emails, you also need to set up `sendmail`. I decided to install
[msmtp](https://wiki.archlinux.org/title/Msmtp), and set it up to use my GMail
account to send me an email.
To test if the error reporting works, edit `array-report.service` and change
the `ExecStart` line to `ExecStart=false`. Then run the report service with
`systemctl start array-report.service`. You should now get an email letting you
know that the `array-report` service failed, with the last 100 lines of the
service status attached.

View File

@ -0,0 +1,91 @@
---
title: Running graphical user services with systemd
date: 2022-03-18
---
> This post is day 3 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
I've recently switched from KDE Plasma to sway as my window manager. I had a problem with the change though: the amazing kdeconnect service wasn't working!
My first attempt at fixing this was to just add a line to my sway config to launch it along with sway.
```
exec /usr/lib/kdeconnectd
```
Looks simple enough. But for some reason, `kdeconnectd` would just disappear
after a while. It would appear to run at startup, and then an hour or two later
I would pull up the kdeconnect app on my phone and it would tell me that my computer
is disconnected.
The biggest issue here was that I had no way to see why kdeconnect had failed.
In comes systemd to save the day. Systemd is a service manager, so it will
actually maintain the logs for these services. That means if kdeconnect is
crashing, I can check the logs for kdeconnect to see why it crashed. I can also
configure it to auto-restart after a crash if I want to.
To launch graphical applications with systemd though, you need to pass the
appropriate environment variables to it so it knows how to launch new windows.
I added this line to my sway config to do exactly that.
```
# Pass all variables to dbus & systemd to run graphical user services
exec dbus-update-activation-environment --all --systemd
```
Next, we need to write a service file to run the application. This is easier
than it sounds, here's the service file I wrote for kdeconnect:
```
[Unit]
Description=Run kdeconnectd.
After=graphical-session.target
StartLimitIntervalSec=600
StartLimitBurst=5
[Service]
Type=simple
ExecStart=/usr/lib/kdeconnectd
Restart=on-failure
RestartSec=5s
[Install]
WantedBy=graphical-session.target
```
I saved this as `~/.config/systemd/user/kdeconnectd.service`. Finally, I enabled it for my user with `systemctl --user enable kdeconnectd.service` and then restarted.
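For reference, you can also skip the restart by reloading and starting the unit directly, something like this (assuming the unit file above is already in place):

```bash
# Pick up the new unit file, then enable and start it immediately
systemctl --user daemon-reload
systemctl --user enable --now kdeconnectd.service
# Follow the logs if it misbehaves
journalctl --user -u kdeconnectd.service -f
```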
The service is configured to automatically restart on failure, but not if it
failed more than 5 times in the last 10 minutes. Systemd also waits 5 seconds
before trying to restart the failed service. This way if it crashes for some
reason, it will restart. But if it keeps crashing rapidly, it won't keep
trying to restart, which could use up too many system resources.
I can now check how the service is doing with `systemctl --user status kdeconnectd.service`!
```
Warning: The unit file, source configuration file or drop-ins of kdeconnectd.service changed on disk. Run 'systemctl --user daemon-reload>
● kdeconnectd.service - Run kdeconnectd.
Loaded: loaded (/home/kaan/.config/systemd/user/kdeconnectd.service; enabled; vendor preset: enabled)
Active: active (running) since Thu 2022-03-17 14:18:15 EDT; 1h 46min ago
Main PID: 2188363 (kdeconnectd)
Tasks: 6 (limit: 77007)
Memory: 24.2M
CPU: 2.440s
CGroup: /user.slice/user-1000.slice/user@1000.service/app.slice/kdeconnectd.service
└─2188363 /usr/lib/kdeconnectd
Mar 17 14:20:58 eclipse systemd[817]: /home/kaan/.config/systemd/user/kdeconnectd.service:6: Unknown key name 'type' in section 'Service'>
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: QObject::connect(KWayland::Client::Registry, Unknown): invalid nullptr parameter
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: No Primary Battery detected on this system. This may be a bug.
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: Total quantity of batteries found: 0
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: QObject::connect(KWayland::Client::Registry, Unknown): invalid nullptr parameter
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: No Primary Battery detected on this system. This may be a bug.
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: Total quantity of batteries found: 0
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: QMetaObject::invokeMethod: No such method KIO::StoredTransferJob::slotDataReqFromDevice()
Mar 17 15:24:35 eclipse kdeconnectd[2188363]: QMetaObject::invokeMethod: No such method KIO::StoredTransferJob::slotDataReqFromDevice()
Mar 17 15:57:29 eclipse systemd[817]: /home/kaan/.config/systemd/user/kdeconnectd.service:9: Unknown key name 'type' in section 'Service'>
```
A bunch of warnings so far, but no crashes yet. But if it does crash again, I'll finally know why.

View File

@ -0,0 +1,47 @@
---
title: A little type system trick in Rust
date: 2022-03-15
---
> This post is day 1 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
While working on a small project recently, I ended up writing this type in Rust.
```rust
type ImageData = Arc<Mutex<Option<ImageBuffer<Rgba<u8>, Vec<u8>>>>>;
```
Even though I wrote it myself, it actually took me a bit after writing it to
figure out what this type was doing so I wanted to write about it.
Let me start from the outside in. The first type we have is `Arc`, which stands for
"atomic reference counting". [Reference counting](https://en.wikipedia.org/wiki/Reference_counting)
is a method to handle ownership of the data, or in other words to figure out
when the data needs to be freed. Garbage collected languages do this
transparently in the background, but in Rust we explicitly need to state that we
want it. Atomic means this is done using [atomic operations](https://en.wikipedia.org/wiki/Linearizability#Primitive_atomic_instructions),
so it is thread safe. In my case, I needed this because this data was going to
be shared between multiple threads, and I didn't know exactly when I would be "done"
with the data.
The next type is `Mutex`, which means [mutual exclusion](https://en.wikipedia.org/wiki/Lock_(computer_science))
or locking. Locks are used to restrict access to data to a single thread at a time.
That means whatever type is inside of this is not thread safe,
so I'm using the lock to protect it, which is indeed the case for the type inside!
The type after that is `Option`. This basically means "nullable": there may or may not be a thing inside this.
The interesting thing here is that this is a [sum type](https://en.wikipedia.org/wiki/Tagged_union),
so Rust helps remind us that this is nullable without introducing a nullability concept to the language. It's just part of the type system!
Then we have `ImageBuffer`, a type from the popular [image crate](https://docs.rs/image/latest/image/index.html).
Not much to talk about with this, that's the data I wanted to store.
The next thing that *is* interesting is the `Rgba<u8>` and `Vec<u8>` inside the image buffer.
What that means (and I'm speculating here because I'm lazy/too busy to check), is that
`Rgba` is just a basic wrapper type (or a "newtype"). It makes the compiler enforce the type of the
image data that's stored in this image buffer, so the user doesn't mix up different data types.
Similar for `Vec<u8>`, (I think) it means that the data inside this buffer is stored in a vector.
Finally, `u8` is probably self descriptive, the pixels and the vector are made out of 8-bit unsigned integers.

View File

@ -0,0 +1,83 @@
---
title: State of Rust GUIs
date: 2022-03-17
---
> This post is day 2 of me taking part in the
> [#100DaysToOffload](https://100daystooffload.com/) challenge.
The website [Are we GUI Yet?](https://www.areweguiyet.com/) helpfully lists a
lot of the libraries and frameworks available for making a GUI in Rust. I've
been looking into making a GUI program in Rust, so I've been working my way
through some of these options.
This is not a thorough review, just my thoughts after a brief look. I'd recommend
looking over the website and deciding for yourself.
## Best candidate: Dioxus
- Website: https://dioxuslabs.com/
Dioxus is probably the option I like the best from a quick look. Declarative
applications similar to React, encapsulated components, first class async
support, and good type checking.
Downsides? Right now it's web only. Desktop applications are just web
applications rendered inside a web view. That's okay for cross platform apps,
but not for what I want to do, which is a lightweight native application.
## Better Electron: Tauri
- Website: https://github.com/tauri-apps/tauri
Tauri is a really good replacement for Electron. You can see the comparison on
their Github page, smaller binaries, less memory use, and faster launch times.
But again, it is a web app running in a web view. Not a native desktop app. Even
though Tauri uses less memory than electron, it still uses ~180 MB according to
their comparison. And the fast launch time is still around 0.4 seconds, way
longer than what I would expect.
## My current preference: Slint
- Website: https://slint-ui.com/
I really like Slint. It is a native GUI with their own OpenGL renderer, and an
optional Qt backend. From some basic experimentation, it seems to launch in less
than 50ms, and uses less than 80 MB of memory (mostly shared libraries).
You can write the code in either `.slint` files (and they actually have okay
editor support for this file type), or inside macros in your code files. The
code also looks pretty intuitive.
The downsides? The theming support is not great/nonexistent, you can't
dynamically generate UI elements (well kinda, you can generate them based on
properties you change at runtime, but the components themselves are hardcoded),
and the code sometimes gets awkward due to current limitations.
```rust
MainWindow := Window {
// You then have to bind to this callback inside rust code. No way to just write a hook that calls a rust function.
callback save_to_file(string);
HorizontalLayout {
height: 32px;
FilePath := LineEdit {
placeholder_text: "placeholder here";
}
Button {
text: "Save to file";
clicked => { save_to_file(FilePath.text); }
}
}
}
```
There is also no way to do some things, like setting a dialog hint for your main
window, which is something I needed to do.
## Conclusion?
It looks like the state of GUIs in rust is still "not yet". There are a few more
projects I need to look at, like [Relm](https://github.com/antoyo/relm), but
their code looks way too verbose to me. In the end, I think the best option
might be to just write my GUI in C++ with Qt, and maybe integrate bits written
in rust inside of that.

68
content/publications.md Normal file
View File

@ -0,0 +1,68 @@
---
no-ttr: true
---
<div> <div class="publication">
## Crafty: Efficient, HTM-Compatible Persistent Transactions
<div class="authors">Kaan Genç, Michael D. Bond, and Guoqing Harry Xu</div>
<div class="conf">ACM SIGPLAN Conference on Programming Language Design and Implementation <a href="https://pldi20.sigplan.org/home">(PLDI 2020)</a>, Online, June 2020</div>
Crafty is a library for transactional storage, built for new non-volatile memory
hardware. Taking advantage of hardware transactional capabilities of modern
CPUs, it provides a low-overhead option that also eliminates the need for
additional concurrency control.
[Talk](https://www.youtube.com/watch?v=wdVLlQXV1to) [Paper](https://dl.acm.org/doi/10.1145/3385412.3385991) [Extended Paper](https://arxiv.org/pdf/2004.00262.pdf) [Implementation](https://github.com/PLaSSticity/Crafty) [Poster](/extra/Crafty Poster.pdf)
</div>
<div class="publication">
## Dependence Aware, Unbounded Sound Predictive Race Detection
<div class="authors">Kaan Genç, Jake Roemer, Yufan Xu, and Michael D. Bond</div>
<div class="conf">ACM SIGPLAN International Conference on Object-Oriented Programming, Systems, Languages, and Applications <a href="https://2019.splashcon.org/track/splash-2019-oopsla">(OOPSLA 2019)</a>, Athens, Greece, October 2019</div>
This paper presents 2 data race detection analyses which analyze a single run of
a program to predict data races that can happen in other runs. These analyses
take advantage of data and control flow dependence to accurately understand how
the analyzed program works, expanding what races can be predicted.
[Talk](https://www.youtube.com/watch?v=YgZWnc31tVQ) [Extended Paper (updated version)](https://arxiv.org/pdf/1904.13088.pdf) [Paper](https://dl.acm.org/doi/10.1145/3360605) [Corrigendum to paper](https://dl.acm.org/action/downloadSupplement?doi=10.1145%2F3360605&file=3360605-corrigendum.pdf) [Implementation](https://github.com/PLaSSticity/SDP-WDP-implementation) [Poster](/extra/DepAware Poster.pdf)
</div>
<div class="publication">
## SmartTrack: Efficient Predictive Race Detection
<div class="authors">Jake Roemer, Kaan Genç, and Michael D. Bond</div>
<div class="conf">ACM SIGPLAN Conference on Programming Language Design and Implementation <a href="https://pldi20.sigplan.org/home">(PLDI 2020)</a>, Online, June 2020 </div>
Predictive data race detection methods greatly improve the number of data races
found, but they typically significantly slow down programs compared to their
non-predictive counterparts. SmartTrack, through improved analyses and clever
algorithms, reduces their overhead to just around non-predictive analyses
without impacting their performance.
[Paper](http://web.cse.ohio-state.edu/~mikebond/smarttrack-pldi-2020.pdf) [Extended Paper](https://arxiv.org/pdf/1905.00494.pdf)
</div>
<div class="publication">
## High-Coverage, Unbounded Sound Predictive Race Detection
<div class="authors">Jake Roemer, Kaan Genç, and Michael D. Bond</div>
<div class="conf">ACM SIGPLAN Conference on Programming Language Design and Implementation <a href="https://pldi18.sigplan.org/">(PLDI 2018)</a>, Philadelphia, PA, USA, June 2018</div>
Predictive data race detection methods typically walk a tight line between
predicting more races and avoiding false races. This paper presents a new
analysis that can predict more races, and a method to efficiently eliminate
false races.
[Paper](http://web.cse.ohio-state.edu/~bond.213/vindicator-pldi-2018.pdf) [Extended Paper](http://web.cse.ohio-state.edu/~bond.213/vindicator-pldi-2018-xtr.pdf)
</div>
</div>
# Activities
[PLDI 2021](https://pldi21.sigplan.org/track/pldi-2021-PLDI-Research-Artifacts) Artifact Evaluation Committee member
[ASPLOS 2021](https://asplos-conference.org/2021/) Artifact Evaluation Committee member
[OOPSLA 2020](https://2020.splashcon.org/track/splash-2020-Artifacts) Artifact Evaluation Committee member

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
output/extra/cv.pdf Normal file

Binary file not shown.

70
output/extra/emacs.css Normal file
View File

@ -0,0 +1,70 @@
/* pygments.org "emacs" style */
.highlight .hll { background-color: #ffffcc }
.highlight { background: #f8f8f8; }
.highlight .c { color: #008800; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #FF0000 } /* Error */
.highlight .k { color: #AA22FF; font-weight: bold } /* Keyword */
.highlight .o { color: #666666 } /* Operator */
.highlight .ch { color: #008800; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #008800; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #008800 } /* Comment.Preproc */
.highlight .cpf { color: #008800; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #008800; font-style: italic } /* Comment.Single */
.highlight .cs { color: #008800; font-weight: bold } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .gr { color: #FF0000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #00A000 } /* Generic.Inserted */
.highlight .go { color: #888888 } /* Generic.Output */
.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #0044DD } /* Generic.Traceback */
.highlight .kc { color: #AA22FF; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #AA22FF; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #AA22FF; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #AA22FF } /* Keyword.Pseudo */
.highlight .kr { color: #AA22FF; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #00BB00; font-weight: bold } /* Keyword.Type */
.highlight .m { color: #666666 } /* Literal.Number */
.highlight .s { color: #BB4444 } /* Literal.String */
.highlight .na { color: #BB4444 } /* Name.Attribute */
.highlight .nb { color: #AA22FF } /* Name.Builtin */
.highlight .nc { color: #0000FF } /* Name.Class */
.highlight .no { color: #880000 } /* Name.Constant */
.highlight .nd { color: #AA22FF } /* Name.Decorator */
.highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
.highlight .nf { color: #00A000 } /* Name.Function */
.highlight .nl { color: #A0A000 } /* Name.Label */
.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #B8860B } /* Name.Variable */
.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
.highlight .w { color: #bbbbbb } /* Text.Whitespace */
.highlight .mb { color: #666666 } /* Literal.Number.Bin */
.highlight .mf { color: #666666 } /* Literal.Number.Float */
.highlight .mh { color: #666666 } /* Literal.Number.Hex */
.highlight .mi { color: #666666 } /* Literal.Number.Integer */
.highlight .mo { color: #666666 } /* Literal.Number.Oct */
.highlight .sa { color: #BB4444 } /* Literal.String.Affix */
.highlight .sb { color: #BB4444 } /* Literal.String.Backtick */
.highlight .sc { color: #BB4444 } /* Literal.String.Char */
.highlight .dl { color: #BB4444 } /* Literal.String.Delimiter */
.highlight .sd { color: #BB4444; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #BB4444 } /* Literal.String.Double */
.highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #BB4444 } /* Literal.String.Heredoc */
.highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
.highlight .sx { color: #008000 } /* Literal.String.Other */
.highlight .sr { color: #BB6688 } /* Literal.String.Regex */
.highlight .s1 { color: #BB4444 } /* Literal.String.Single */
.highlight .ss { color: #B8860B } /* Literal.String.Symbol */
.highlight .bp { color: #AA22FF } /* Name.Builtin.Pseudo */
.highlight .fm { color: #00A000 } /* Name.Function.Magic */
.highlight .vc { color: #B8860B } /* Name.Variable.Class */
.highlight .vg { color: #B8860B } /* Name.Variable.Global */
.highlight .vi { color: #B8860B } /* Name.Variable.Instance */
.highlight .vm { color: #B8860B } /* Name.Variable.Magic */
.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */

BIN
output/extra/kaangenc.gpg Normal file

Binary file not shown.

BIN
output/img/2022-03-29-00-16-13.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/2022-03-29-00-17-38.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/2022-03-29-00-20-48.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/2022-03-29-00-22-48.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/ace-jump-mode.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/app-search-bar.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/cc-by-sa-4.0-88x31.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/company-flycheck.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/deus-ex-render-settings.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/deus-ex-renderer-comparison.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/docview.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/elfeed.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/emacs-terminal.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/erc.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/eshell.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/eww.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/game-cover.jpg (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/magit.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/mu4e.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/passmenu.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/password_store.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
output/img/profile.jpg (Stored with Git LFS) Normal file

Binary file not shown.

24
output/index.gmi Normal file
View File

@ -0,0 +1,24 @@
# Kaan Barmore-Genç
Hello, I'm Kaan Barmore-Genc.
Index of posts:
=> posts/managing-my-recipes-with-dendron.gmi 2022-04-04 00:00 - Managing my recipes with Dendron
=> posts/black-crown-initiate.gmi 2022-04-02 00:00 - Black Crown Initiate
=> posts/bulgur-cloud-intro.gmi 2022-03-29 00:00 - An introduction to Bulgur Cloud - simple self hosted cloud storage
=> posts/do-kids-know-computers-today.gmi 2022-03-28 00:00 - Do kids not know computers now?
=> posts/my-response-to-contempt-culture.gmi 2022-03-27 00:00 - My response to Aurynn Shaw's "Contempt Culture" post
=> posts/actix-web-import-issue.gmi 2022-03-26 00:00 - Solving `app_data` or `ReqData` missing in requests for actix-web
=> posts/mass-batch-processing-on-the-CLI.gmi 2022-03-19 00:00 - Mass batch processing on the CLI
=> posts/running-graphical-user-services-with-systemd.gmi 2022-03-18 00:00 - Running graphical user services with systemd
=> posts/state-of-rust-GUIs.gmi 2022-03-17 00:00 - State of Rust GUIs
=> posts/rust-typesystem-tricks.gmi 2022-03-15 00:00 - A little type system trick in Rust
=> posts/getting-deus-ex-running-on-linux.gmi 2022-03-12 00:00 - Getting Deus Ex GOTY Edition running on Linux
=> posts/raid.gmi 2022-03-10 00:00 - My local data storage setup
=> posts/emacs-as-an-os.gmi 2016-04-14 00:00 - Emacs as an operating system
=> posts/customizing-emacs.gmi 2015-10-06 00:00 - Emacs and extensibility
=> posts/mpv.gmi 2015-07-18 00:00 - Motion Interpolation, 24 FPS to 60 FPS with mpv, VapourSynth and MVTools
=> posts/duplicity.gmi 2015-05-16 00:00 - Taking Backups with Duplicity
=> posts/bash.gmi 2015-04-12 00:00 - Writing a Program in Bash
=> posts/pass.gmi 2015-03-30 00:00 - Switching to pass

View File

@ -0,0 +1,104 @@
# Solving `app_data` or `ReqData` missing in requests for actix-web
2022-03-26 00:00
> This post is day 5 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I'm using `actix-web` to set up a web server, and I've been hitting a small problem that I think other people may come across too.
To explain the problem, let me talk a bit about my setup. I have a custom middleware that checks if a user is authorized to access a route. It looks like this:
```rust
impl<S: 'static, B> Service<ServiceRequest> for CheckLoginMiddleware<S>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
{
type Response = ServiceResponse<EitherBody<B>>;
type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
dev::forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
let state = self.state.clone();
let (request, payload) = req.into_parts();
let service = self.service.clone();
let user_token = get_token_from_header(&request);
let path_token = if self.allow_path_tokens {
get_token_from_query(&request)
} else {
None
};
Box::pin(async move {
match verify_auth(state, user_token, path_token, request.path()).await {
Ok(authorized) => {
tracing::debug!("Request authorized, inserting authorization token");
// This is the "important bit" where we insert the authorization token into the request data
request.extensions_mut().insert(authorized);
let service_request =
service.call(ServiceRequest::from_parts(request, payload));
service_request
.await
.map(ServiceResponse::map_into_left_body)
}
Err(err) => {
let response = HttpResponse::Unauthorized().json(err).map_into_right_body();
Ok(ServiceResponse::new(request, response))
}
}
})
}
}
```
The `verify_auth` function is omitted, but the gist of it is that it returns a `Result<Authorized, Error>`. If the user is authorized, the authorization token that `verify_auth` returned is then attached to the request.
Then here's how I use it in a path:
```rust
#[delete("/{store}/{path:.*}")]
async fn delete_storage(
params: web::Path<(String, String)>,
// This parameter is automatically filled with the token
authorized: Option<ReqData<Authorized>>,
) -> Result<HttpResponse, StorageError> {
let (store, path) = params.as_ref();
let mut store_path = get_authorized_path(&authorized, store)?;
store_path.push(path);
if fs::metadata(&store_path).await?.is_file() {
tracing::debug!("Deleting file {:?}", store_path);
fs::remove_file(&store_path).await?;
} else {
tracing::debug!("Deleting folder {:?}", store_path);
fs::remove_dir(&store_path).await?;
}
Ok(HttpResponse::Ok().finish())
}
```
This setup worked for this path, but would absolutely not work for another path. I inserted logs to track everything, and just found that the middleware would insert the token, but the path would just get `None`. How‽ I tried to slowly strip everything away from the non-functional path until it was identical to this one, but it still would not work.
Well it turns out the solution was very simple, see this:
```rust
use my_package::storage::put_storage;
use crate::storage::delete_storage;
```
Ah! They are imported differently. I had set up my program as both a library and a binary for various reasons. However, it turns out importing the same thing from `crate` is different from importing it from the library. Because of the difference in import, Actix doesn't recognize that the types match, so the route can't access the attached token.
The solution is normalizing the imports. I went with going through the library for everything, because that's what `rust-analyzer`'s automatic import seems to prefer.
```rust
use my_package::storage::{put_storage, delete_storage};
```
Solved!

9
output/posts/bash.gmi Normal file
View File

@ -0,0 +1,9 @@
# Writing a Program in Bash
2015-04-12 00:00
I don't really know why, but writing code in Bash makes me kinda anxious. It feels really old, outdated, and confusing. Why can't a function return a string? And no classes, or even data types? After getting confused, usually, I just end up switching to Python. But this time, I decided to stick with Bash. And I am surprised. It is unbelievably good. I must say, now I understand the Unix philosophy much better. Having small programs that each do one thing very well allows you to combine the power of those programs in your scripts. You think your favourite programming language has a lot of libraries? Well, bash has access to more. The entire Unix ecosystem powers bash. Converting videos, taking screenshots, sending mails, downloading and processing pages; there are already command line tools for all of that, and you have great access to all of them.
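As a small illustration of that kind of glue (not from WoWutils, just an example), counting how many unique hosts a page links to is a one-liner once you chain a few tools together:

```bash
curl -s https://example.com | grep -oE 'https?://[^"]+' | cut -d/ -f3 | sort -u | wc -l
```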
The program I've started writing is called WoWutils. And I'm still shocked at just how much functionality I have added with so little code. If you are considering writing a program in Bash too, just go through with it. It really is very powerful.
=> https://github.com/SeriousBug/WoWutils WoWutils

View File

@ -0,0 +1,20 @@
# Black Crown Initiate
2022-04-02 00:00
> This post is day 9 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I love metal, I've been listening to metal since I was 13. It was the first music genre that I actually liked: until I discovered metal I actually thought I didn't like music at all, because nothing I heard on the radio or heard my friends listening to was interesting to me. My taste in music has expanded and changed over the years to include different types of music and genres, but metal remains the one I love the most.
Demonstrating my metal-worthiness aside, I've always listened to European metal bands. I had this weird elitist thought that "good" metal could only come from Europe, with exceptions for some non-European bands, and that American metal was just always bad. This is obviously false, but I just had never come across anything American that I liked. That is, until recently.
I recently came across Black Crown Initiate, a progressive death metal band from Pennsylvania. And I have to tell you that they are amazing.
=> https://www.metal-archives.com/bands/Black_Crown_Initiate/3540386765 Black Crown Initiate
Their first release "Song of the Crippled Bull" is absolutely amazing. The music is just the right amount of metal and progressive, and the lyrics are amazing. The clean vocals get the themes of the song across, while the growls give a lot of power to the songs. My favorite songs from this release are "Stench of the Iron Age" and the title track "Song of the Crippled Bull". Other highlights from the band I've listened to so far include "A Great Mistake", "Death Comes in Reverse", and "Vicious Lives".
I'm still making my way through their songs, but I'm glad to have discovered something from America that I absolutely love. I'm now trying to find more non-European bands that I enjoy.

View File

@ -0,0 +1,32 @@
# An introduction to Bulgur Cloud - simple self hosted cloud storage
2022-03-29 00:00
> This post is day 8 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I've been recently working on Bulgur Cloud, a self hosted cloud storage software. It's essentially Nextcloud, minus all the productivity software. It's also designed to be much simpler, using no databases and keeping everything on disk.
The software is still too early to actually demo, but the frontend is at a point where I can show some features off. So I wanted to show it off.
=> /img/2022-03-29-00-17-38.png A white web page with the words "Bulgur Cloud". Below is "Simple and delicious cloud storage and sharing". Under that are two fields titled "Username" and "Password", and a black button titled "Login".
I've been going for a clean "print-like" look. I think it's going pretty well so far.
=> /img/2022-03-29-00-16-13.png A web page with 3 files listed, "sprite-fright.mp4", "test.txt", and "sprite-fright.LICENSE.txt". There are pencil and trash bin symbols to the right of the file names. A leftward arrow is grayed out on the top left, and the top right says "kaan". On the bottom right there's a symbol of a cloud with an up arrow.
I'm not sure about the details of how the directory listing will look. I don't think I like the upload button in the corner, and the rename and delete icons feel like they would be easy to mis-press. There is a confirmation before anything is actually deleted, but it still would be annoying.
=> /img/2022-03-29-00-20-48.png A pop up that says "Delete file "text.txt", with the buttons "Delete" and "Cancel" below it.
Something I'm pretty happy with is the file previews. I've added support for images, videos, and PDFs. Video support is restricted by whatever formats are supported by your browser, the server doesn't do any transcoding, but I think it's still very useful for a quick preview. I'm also planning on support for audio files. The server supports range requests, so you can seek around in the video without waiting to download everything (although I've found that Firefox doesn't handle that very well).
=> /img/2022-03-29-00-22-48.png A page with the text "sprite-fright.mp4", and a video player below showing a frame from the movie. Below the player is a link that says "Download this file".
This is a web interface only so far, but I'm planning to add support for mobile and desktop apps eventually. I've been building the interface with React Native so adding mobile/desktop support shouldn't be too difficult, but I've been finding that "write once, run everywhere" isn't always that simple. I ended up having to add web-only code to support stuff like the video and PDF previews, so I'll have to find replacements for some parts. Mobile and desktop apps natively support more video and audio formats too, and with native code you usually have the kind of performance to transcode video if needed.
The backend is written in Rust with `actix-web`, using async operations. It's incredibly fast, and uses a tiny amount of resources (a basic measurement suggests < 2 MB of memory used). I'm pretty excited about it!
After a few more features (namely being able to move files), I'm planning to put together a demo to show this off live! The whole thing will be open source, but I'm waiting until it's a bit more put together before I make the source public. The source will go live at the same time as the demo.

View File

@ -0,0 +1,55 @@
# Emacs and extensibility
2015-10-06 00:00
Update: I've put the small Emacs tools I have written to a gist.
=> https://gist.github.com/91c38ddde617b98ffbcb gist
I have been using Emacs for some time, and I really love it. The amount of power it has, and the customizability, is incredible. What other editor allows you to connect to a server over SSH and edit files, which is what I am doing to write this post? How many editors or IDEs have support for so many languages?
One thing I didn't know in the past, however, is how extensible Emacs is. I mean, I do use a lot of packages, but I had never written Elisp and I didn't know how hard or easy it would be. But after starting to learn Clojure a bit, and feeling more comfortable with lots of parentheses, I decided to extend Emacs a bit to make it fit me better.
The first thing I added is an "insert date" function. I use Emacs to take notes during lessons -using Org-mode- and I start every note with the date of the lesson. Sure, glancing at the date to the corner of my screen and writing it down takes just a few seconds, but why not write a command to do it for me? Here is what I came up with:
```commonlisp
(defun insert-current-date ()
"Insert the current date in YYYY-MM-DD format."
(interactive)
(shell-command "date +'%Y-%m-%d'" t))
```
Now that was easy and convenient. And being able to write my first piece of Elisp so easily was really fun, so I decided to tackle something bigger.
It is not rare that I need to compile and run a single C file. Nothing fancy, no libraries, no makefile, just a single C file to compile and run. I searched around the internet like "Emacs compile and run C", but couldn't find anything. I had been doing this by opening a shell in Emacs and compiling/running the program, but again, why not automate it?
The code that follows is not really good. "It works" is as good as it gets really, though considering that this is the first substantial Elisp I have written, that is pretty impressive (a credit to the language and Emacs, which are both very helpful and powerful).
```commonlisp
(require 's)
(defun compile-run-buffer ()
"Compile and run buffer."
(interactive)
(let* ((split-file-path (split-string buffer-file-name "/"))
(file-name (car (last split-file-path)))
(file-name-noext (car (split-string file-name "[.]")))
(buffer-name (concat "compile-run: " file-name-noext))
(buffer-name* (concat "*" buffer-name "*")))
(make-comint buffer-name "gcc" nil "-Wall" "-Wextra" "-o" file-name-noext file-name)
(switch-to-buffer-other-window buffer-name*)
(set-process-sentinel (get-buffer-process (current-buffer))
(apply-partially
'(lambda (prog-name proc even)
(if (s-suffix? "finished\n" even)
(progn
(insert "Compilation successful.\n\n")
(comint-exec (current-buffer) prog-name (concat "./" prog-name) nil nil))
(insert (concat "Compilation failed!\n" even))))
file-name-noext))))
```
Again, the code is not really good. I'm uploading it here right now because I'm actually very excited that I wrote this. Even now I can think of ways to improve it, for example moving the compiler and the flags to variables so that they can be customized. I could also improve the presentation, because the strings printed by this function, comint, and the running programs get mixed up. I'll update this blog post if I get to updating the code.
If this is your first time hearing about Emacs, this post may look very confusing. I don't do Emacs any justice here, so do check it out somewhere like Emacs rocks. On the other hand, if you have been looking for functionality like this, I hope this helps. If you have any suggestions about the code, I'd love to hear them; you can find my email on the "about me" page. Anyway, have a good day!
=> http://emacsrocks.com/ Emacs rocks

View File

@ -0,0 +1,37 @@
# Do kids not know computers now?
2022-03-28 00:00
> This post is day 7 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
One discussion point I've seen around is that kids nowadays don't know how to use computers. Okay, that's a bit of a strawman, but that's the gist of this article titled File Not Found.
=> https://www.theverge.com/22684730/students-file-folder-directory-structure-education-gen-z File Not Found
The gist of the article is that Gen-Z kids are too used to search interfaces. That means they don't actually know about where files are stored, or how they are organized. They only know that they can access the files by searching for them. The article talks about how professors ended up having to teach them how to navigate directory structures and file extensions.
As the article claims, it seems to be related to how modern user interfaces are designed. Our UIs nowadays are more focused around search capabilities: you just type in a search bar and find what you need.
=> /img/app-search-bar.png A desktop, displaying a bar with the words "launch", followed by "fi". On the right side of the bar are program names "Firefox", "fish", "Profiler", "Frontend", "Patch Fixes", and "Qt File Manager". Firefox is highlighted.
In some sense I do like this sort of interface. I use something like that when launching applications, both on my Desktop and on my laptop! It's actually a better interface compared to hunting for icons on your desktop. I use similar interfaces in VSCode to switch between open editor tabs.
However, this is a complementary interface to hierarchy and organization. Going back to the file systems example discussed in the article, being able to search through your files and folders is useful. But it's not a replacement for hierarchy. You can't just throw files into a folder and expect to always find them again.
Let me give an example with Google Photos. I have been keeping all my photos on Google Photos, and between migrating photos from old phones and ones I have taken on new phones, I have over 8,000 photos. This is completely disorganized of course, but Google Photos has a search functionality. It even uses AI to recognize the items in the photos, which you can use in the search. A search for "tree" brings up photos of trees, "cat" brings up cats, and you can even tag people and pets and then search for their names. Very useful, right?
Well, it is sometimes. I recently had to remember what my wife's car license plate is. A quick search for "license plate" on google photos and luckily, I had taken a photo of her car that included the license plate in the frame. Success! On the other hand, I was trying to find some photos from a particular gathering with my friends. Searches for their names, names of the place, or stuff I know are in the picture turned up with nothing. I eventually had to painstakingly scroll through all photos to find the one I wanted.
This reminds me of 2 things. One is this article named To Organize The World's Information by @dkb868@twitter.com. One thing I found interesting on that article was that the concept of "the library" has been lost over the last few decades as a way to organize information. They define the library as a hierarchical, categorized directory of information. The article also talks about other organizational methods, and is worth a read.
=> https://dkb.io/post/organize-the-world-information To Organize The World's Information
=> https://nitter.net/dkb868 @dkb868@twitter.com
The other thing is the note taking software we're building at my workplace, Dendron. One of the core tenets of Dendron is that the information is hierarchical. Something the founder Kevin recognized was that other note taking software makes it easier to make new notes, but doesn't support hierarchical structures, which makes it hard to find those notes later. I've also experienced this: when I used other note taking software (or sticky notes!) I found that it was easy to just jot down a few notes, but they very quickly got lost or became hard to find when I needed them. A hierarchical organization makes it possible to actually find and reference the information later.
=> https://dendron.so/ Dendron
Requiring organization creates a barrier of entry to storing information, but what good is storing information if you can't retrieve the information later? This seems to work pretty well with Dendron. Would it not work for other things? Why not for taking photos? You of course want to be able to quickly snap a photo so you can record a moment before it's gone, but perhaps you could be required to organize your photos afterwards. Before modern cellphones & internet connected cameras, you'd have to get your photos developed or transfer them off an SD card: a step where you would have to (or have the opportunity to) organize your photos. I wonder if cloud services could ask you to organize your photos before syncing them as well.

View File

@ -0,0 +1,60 @@
# Taking Backups with Duplicity
2015-05-16 00:00
I had wanted to start taking backups for some time, but I hadn't had the time to do any research and set everything up. After reading another horror story that was saved by backups, I decided to start taking some backups.
=> https://www.reddit.com/r/linuxmasterrace/comments/35ljcq/couple_of_days_ago_i_did_rm_rf_in_my_home/ horror story that was saved by backups
After doing some research on backup options, I decided on duplicity. The backups are compressed, encrypted and incremental, both saving space and ensuring security. It supports both local and SSH targets (as well as many other protocols), so it has everything I need.
=> http://duplicity.nongnu.org/ duplicity
I first took a backup onto my external hard drive, then to my VPS. The main problem I encountered was that duplicity uses paramiko for SSH, but it wasn't able to negotiate a key exchange algorithm with my VPS. Luckily, duplicity also supports pexpect, which uses OpenSSH. If you encounter the same problem, you just need to tell duplicity to use the pexpect backend by prepending your URL with `pexpect+`, like `pexpect+ssh://example.com`.
=> https://github.com/paramiko/paramiko paramiko
=> http://pexpect.sourceforge.net/pexpect.html pexpect
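For example, a backup over SSH would look roughly like this (the host and paths here are made up):
```bash
# The pexpect+ prefix forces the OpenSSH-based backend instead of paramiko.
duplicity /home/kaan pexpect+ssh://kaan@example.com/backup
```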
Duplicity doesn't seem to have any sort of configuration file of its own, so I ended up writing a small bash script to serve as a sort of configuration, and also to keep me from running duplicity with the wrong args. I kept forgetting to add an extra slash to `file://`, causing duplicity to back up my home directory into my home directory! :D
If anyone is interested, here's the script:
```bash
#!/bin/bash
# Confirm before running without root, since some files may not be readable.
if [[ $(id -u) != "0" ]]; then
    read -p "Backup should be run as root! Continue? [y/N]" yn
    case $yn in
        [Yy]*) ;;
        *) exit;;
    esac
fi

if [[ $1 = file://* ]]; then
    echo "Doing local backup."
    ARGS="--no-encryption"
    if [[ $1 = file:///* ]]; then
        URL=$1
    else
        echo "Use absolute paths for backup."
        exit 1
    fi
elif [[ $1 = scp* ]]; then
    echo "Doing SSH backup."
    # pexpect+ makes duplicity use the OpenSSH based backend.
    ARGS="--ssh-askpass"
    URL="pexpect+$1"
else
    echo "Unknown URL, use scp:// or file://"
    exit 1
fi

if [[ -n "$1" ]]; then
    duplicity $ARGS --exclude-filelist /home/kaan/.config/duplicity-files /home/kaan "$URL/backup"
else
    echo "Please specify a location to backup into."
    exit 1
fi
```

View File

@ -0,0 +1,174 @@
# Emacs as an operating system
2016-04-14 00:00
Emacs is sometimes jokingly called a good operating system with a bad text editor. Over the last year, I found myself using more and more of Emacs, so I decided to try out how much of an operating system it is. Of course, operating system here is referring to the programs that the user interacts with, although I would love to try out some sort of Emacs-based kernel.
# Emacs as a terminal emulator / multiplexer
Terminals are all about text, and Emacs is all about text as well. Not only that, but Emacs is also very good at running other processes and interacting with them. It is no surprise, I think, that Emacs works well as a terminal emulator.
Emacs comes out of the box with `shell` and `term`. Both of these commands run the shell of your choice, and give you a buffer to interact with it. Shell gives you a more emacs-y experience, while term overrides all default keymaps to give you a full terminal experience.
=> /img/emacs-terminal.png A terminal interface, with the outputs of the commands "ls" and "git status" displayed.
To use emacs as a full terminal, you can bind these to a key in your window manager. I'm using i3, and my keybinding looks like this:
```
bindsym $mod+Shift+Return exec --no-startup-id emacs --eval "(shell)"
```
You can also create a desktop file to have a symbol to run this on a desktop environment. Try putting the following text in a file at `~/.local/share/applications/emacs-terminal.desktop`:
```
[Desktop Entry]
Name=Emacs Terminal
GenericName=Terminal Emulator
Comment=Emacs as a terminal emulator.
Exec=emacs --eval '(shell)'
Icon=emacs
Type=Application
Terminal=false
StartupWMClass=Emacs
```
If you want to use term instead, replace `(shell)` above with `(term "/usr/bin/bash")`.
A very useful feature of terminal multiplexers is the ability to leave the shell running, even after the terminal is closed, or the SSH connection has dropped if you are connecting over that. Emacs can also achieve this with its server-client mode. To use that, start emacs with `emacs --daemon`, and then create a terminal by running `emacsclient -c --eval '(shell)'`. Even after you close emacsclient, since Emacs itself is still running, you can run the same command again to get back to your shell.
One caveat is that if there is a terminal/shell already running, Emacs will automatically open that whenever you try opening a new one. This can be a problem if you are using Emacs in server-client mode, or want to have multiple terminals in the same window. In that case, you can either do `M-x rename-uniquely` to change the name of the existing terminal, which will make Emacs create a new one next time, or you can add these hooks to your `init.el` to always get that behaviour:
```lisp
(add-hook 'shell-mode-hook 'rename-uniquely)
(add-hook 'term-mode-hook 'rename-uniquely)
```
# Emacs as a shell
Of course, it is not enough that Emacs works as a terminal emulator. Why not use Emacs as a shell directly, instead of bash/zsh? Emacs has you covered for that too. You can use eshell, which is a shell implementation, completely written in Emacs Lisp. All you need to do is press `M-x eshell`.
=> /img/eshell.png An Emacs window, split in two. Left side shows a command line with the command "cat README.rst >>> #<buffer *scratch*>". Right side shows the emacs scratch buffer, with the contents of the readme file displayed.
The upside is that eshell can evaluate and expand lisp expressions, as well as redirect output to Emacs buffers. The downside, however, is that eshell is not feature complete. It lacks some features such as input redirection, and the documentation notes that it is inefficient at piping output between programs.
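For example, you can mix lisp expressions into your commands, or send output straight into a buffer. A small sketch of what that looks like:
```
~ $ echo (+ 21 21)
42
~ $ ls *.org > #<buffer file-list>
```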
If you want to use eshell instead of shell or term, you can replace `shell` in the examples of terminal emulator section with `eshell`.
# Emacs as a mail client
Zawinski's Law: Every program attempts to expand until it can read mail. Of course, it would be disappointing for Emacs to not handle mail as well.
=> http://www.catb.org/~esr/jargon/html/Z/Zawinskis-Law.html Zawinski's Law
Emacs already ships with some mail capability. To get a full experience however, I'd recommend using mu4e (mu for emacs). I have personally set up OfflineIMAP to retrieve my emails, and mu4e gives me a nice interface on top of that.
=> http://www.djcbsoftware.nl/code/mu/mu4e.html mu4e
=> http://www.offlineimap.org/ OfflineIMAP
=> /img/mu4e.png An emacs window, displaying several emails on top with titles like "Announcing Docker Cloud", or "Order #29659 shipped". An email titles "Add 'url' option to 'list' command' is selected, and the bottom half of the window displays the contents of this email. Email display includes "From" and "To" fields, "Date", "Flags", and the body of the email.
I'm not going to talk about the configuration of these programs; I'd recommend checking out their documentation. Before ending this section though, I also want to mention mu4e-alert.
=> https://github.com/iqbalansari/mu4e-alert mu4e-alert
# Emacs as a feed reader (RSS/Atom)
Emacs handles feeds very well too. The packages I'm using here are Elfeed and Elfeed goodies. Emacs can even show images in the feeds, so it covers everything I need from a feed reader.
=> https://github.com/skeeto/elfeed Elfeed
=> https://github.com/algernon/elfeed-goodies Elfeed goodies
=> /img/elfeed.png A window, with a list on the left displaying entries from "xkcd.com", "Sandra and Woo", and "The Codeless Code". An entry titled "Pipelines" is selected, and the right side of the window displays the contents of that XKCD.
# Emacs as a file manager
Why use a different program to manage your files when you can use Emacs? Emacs ships with dired, as well as image-dired. This gives you a file browser, with optional image thumbnail support.
# Emacs as a document viewer
Want to read a pdf? Need a program to do a presentation? Again, Emacs.
=> /img/docview.png An emacs window displaying a PDF file, titled "Clojure for the Brave and True.pdf". The page includes some clojure code, and talks about Emacs.
Emacs comes with DocView which has support for PDF, OpenDocument and Microsoft Office files. It works surprisingly well.
=> https://www.gnu.org/software/emacs/manual/html_node/emacs/Document-View.html DocView
Also, PDF Tools brings even more PDF viewing capabilities to Emacs, including annotations, text search and outline. After installing PDF Tools, Emacs has become my primary choice for reading PDF files.
=> https://github.com/politza/pdf-tools PDF Tools
# Emacs as a browser
Emacs comes out of box with eww, a text-based web browser with support for images as well.
=> https://www.gnu.org/software/emacs/manual/html_node/eww/index.html#Top eww
=> /img/eww.png An Emacs window, displaying the Wikipedia web page for Emacs.
Honestly, I don't think I'll be using Emacs to browse the web. But still, it is nice that the functionality is there.
# Emacs as a music player
Emacs can also act as a music player thanks to EMMS, Emacs MultiMedia System. If you are wondering, it doesn't play the music by itself but instead uses other players like vlc or mpd.
=> https://www.gnu.org/software/emms/ EMMS
It has support for playlists, and can show thumbnails as well. As for formats, it supports whatever the players it uses support, which means you can use basically any file type.
# Emacs as an IRC client
I don't use IRC a lot, but Emacs comes out of the box with support for that as well thanks to ERC.
=> https://www.emacswiki.org/emacs?action=browse;oldid=EmacsIrcClient;id=ERC ERC
=> /img/erc.png An Emacs window, displaying an IRC chat for "#emacs@freenode".
# Emacs as a text editor
Finally, Emacs also can work well as a text editor.
Emacs is a pretty fine text editor out of the box, but I want to mention some packages here.
First, multiple cursors. Multiple cursors mode allows you to edit text at multiple places at the same time.
=> https://github.com/magnars/multiple-cursors.el multiple cursors
I also want to mention undo-tree. It acts like a mini revision control system, allowing you to undo and redo without ever losing any text.
=> http://www.dr-qubit.org/emacs.php#undo-tree undo-tree
Another great mode is iy-go-to-char. It allows you to quickly jump around by going to the next/previous occurrences of a character. It is very useful when you are trying to move around within a line.
=> https://github.com/doitian/iy-go-to-char iy-go-to-char
Ace Jump Mode allows you to jump around the visible buffers. It can jump around based on initial characters of words, or jump to specific lines. It can also jump from one buffer to another, which is very useful when you have several buffers open in your screen.
=> https://github.com/winterTTr/ace-jump-mode/ Ace Jump Mode
=> /img/ace-jump-mode.png An emacs window, with Python code displayed. Several locations within the code are highlighted with different letters.
Finally, I want to mention ag.el, which is an Emacs frontend for the silver searcher. If you don't know about ag, it is a replacement for grep that recursively searches directories, and has some special handling for projects, and is very fast.
=> https://github.com/Wilfred/ag.el ag.el
# Emacs as an IDE
People sometimes compare Emacs to IDEs and complain that a text editor such as Emacs doesn't have enough features. What they are forgetting, of course, is that Emacs is an operating system, and we can have an IDE in it as well.
There are different packages for every language, so I'll only mention the language-agnostic ones here.
For interacting with git, magit is a wonderful interface.
=> http://magit.vc/ magit
=> /img/magit.png An emacs window, displaying the git log for a repository at the top, and the shortcuts for git commands such as "Apply", "Stage", "Unstage" below.
For auto-completion, Company mode works wonders. I rely heavily on completion while writing code, and company mode has support for anything I tried writing.
=> https://company-mode.github.io/ Company mode
If you like having your code checked as you type, flycheck has you covered. It has support for many tools and languages.
=> https://www.flycheck.org/ flycheck
=> /img/company-flycheck.png A C code file, with the letters "st" typed. A pop-up below the cursor displays options like "strcat", "strchr", "strcmp" and more.

View File

@ -0,0 +1,78 @@
# Getting Deus Ex GOTY Edition running on Linux
2022-03-12 00:00
I've been struggling with this for a few hours, so I might as well document how I did it.
I have a particular setup, which ended up causing issues. Most important are that I'm using Sway, a tiling Wayland compositor, and a flatpak install of Steam.
## Mouse doesn't move when the game is launched
It looks like there's a problem with the game window grabbing the cursor on my setup: moving the mouse doesn't move the cursor in the game, and if you move it too far to the side it takes you out of the game window.
The solution to this is using Gamescope, which is a nested Wayland compositor that makes the window inside it play nice with your actual compositor.
Because I'm using the flatpak install of Steam, I needed to install the flatpak version of gamescope. One catch here is that for me, this wouldn't work if I also had the flatpak MangoHud installed. The only solution I could come up with right now was to uninstall MangoHud.
=> https://github.com/flathub/com.valvesoftware.Steam.Utility.gamescope flatpak version of gamescope
```bash
flatpak remove org.freedesktop.Platform.VulkanLayer.MangoHud # if you have it installed
flatpak install com.valvesoftware.Steam.Utility.gamescope
```
Then, right click on the game and select properties, then in launch options type `gamescope -f -- %command%`. This will launch the game inside gamescope, and the cursor should move inside the game now.
## The game is too dark to see anything
It looks like the game relied on some old DirectX or OpenGL features or something, because once you do launch into the game, everything is extremely dark and hard to see. At first I was wondering how anyone could play the game like this, but it turns out that's not how the game is supposed to look!
I finally managed to solve this by following the installer steps for the Deus Ex CD on Lutris. Yeah, roundabout way to solve it, but it worked.
=> https://lutris.net/games/install/948/view Deus Ex CD on Lutris
First download the updated D3D9 and OpenGL renderers from the page, and extract them into the `System` folder inside the game.
```bash
cd "$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/common/Deus Ex/System"
wget https://lutris.net/files/games/deus-ex/dxd3d9r13.zip
wget https://lutris.net/files/games/deus-ex/dxglr20.zip
unzip dxd3d9r13.zip
unzip dxglr20.zip
```
Next, download and install the `1112fm` patch.
```bash
cd "$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/common/Deus Ex/System"
wget https://lutris.net/files/games/deus-ex/DeusExMPPatch1112fm.exe
env WINEPREFIX="$HOME/.var/app/com.valvesoftware.Steam/.steam/steam/steamapps/compatdata/6910/pfx/" wine DeusExMPPatch1112fm.exe
```
Follow the steps of the installer. It should automatically find where the game is installed. Once the install is done, launch the game, then head into the settings and pick "Display Settings", then "Rendering Device". In the renderer selection window, pick "Show all devices", and then select "Direct3D9 Support".
=> /img/deus-ex-render-settings.png A window with the title "Deus Ex" in a stylized font. Below it lists several options such as "Direct3D Support", "Direct3D9 Support", and "OpenGL Support". Direct3D9 is selected. Below are two radio buttons, with the one titled "Show all devices" selected.
Launch back into the game, head into the display settings again, pick your resolution, and restart the game. Then head into the display settings yet again, this time change the color depth to 32 bit. Restart once more. Yes, you do have to do them separately or the game doesn't save the color depth change for some reason. Finally, you can start playing!
=> /img/deus-ex-renderer-comparison.png A game screenshot displaying the Statue of Liberty in front of a cityscape. Closer to the player are wooden docks. The image is split down the middle, left side says "before" and is very dark, the right side says "after" and is much lighter.
## Other small issues
Here are a few more issues you might hit during this whole process:
> My cursor moves too fast!
You need to turn down the cursor speed. My mouse has buttons to adjust the speed on the fly, so I use those to turn down the speed.
> After changing resolution, I can't move my cursor!
Use the keyboard shortcuts (arrow keys and enter) to exit the game. It should work again when you restart.
> The cursor doesn't move when I open the game, even with gamescope!
I'm not fully sure why or how this happens, but a few things I found useful:
* When the game is launching, and it's showing the animation of the studio logo, don't click! Press escape to bring up the menu instead.
* Press escape to bring the menu up, then hit escape again to dismiss it. It sometimes starts working after that.
* Use the keyboard to exit the game and restart. It always works the next time for me.

View File

@ -0,0 +1,140 @@
# Managing my recipes with Dendron
2022-04-04 00:00
> This post is day 10 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I like to cook at home, but for a long time I never wrote down or saved any of my recipes. Because of that I would occasionally completely forget how to make something. My mom and my grandmom write down their recipes in notebooks, but I want something more powerful and resilient than pen and paper.
At first I tried writing down my recipes in Google Keep, but found it a bit tedious. That's where Dendron came in. Dendron is a knowledge management and note taking tool. It comes with features that enhance the writing experience, but more importantly it has a lot of features that enhance the discoverability of what you wrote.
For reference, I have the repository for the recipes available publicly.
=> https://gitea.bgenc.net/kaan/recipes repository for the recipes
## Setup
Dendron is an extension for Visual Studio Code, so you'll need to install both. There's a great tutorial to go through, but I'm already experienced with it so I went ahead and created a new workspace that I called "recipes".
=> https://marketplace.visualstudio.com/items?itemName=dendron.dendron Dendron
Next, I created a template and a schema to help me write new recipes. The template is just a regular Dendron note, which I named `templates.recipe`.
```md
* Servings:
* Calories:
* Protein:
* Fiber:
## Ingredients
## Instructions
## Notes
```
This template immediately gives me the basic structure of a recipe. I have the ingredients and instructions, and then I have a place to put any additional notes about the recipe (for example, things I want to change next time I cook it, or how to serve it best). I also have a section at the top to fill out some nutritional information. I use the mobile app Cronometer to calculate that, although most of the time I don't bother because it's just a nice-to-have that I don't really need.
Next, here's my schema.
```yml
version: 1
imports: []
schemas:
- id: recipes
title: recipes
parent: root
children:
- id: bowls
title: bowls
namespace: true
template: templates.recipe
- id: baked
title: baked
namespace: true
template: templates.recipe
- id: dessert
title: dessert
namespace: true
template: templates.recipe
- id: misc
title: misc
namespace: true
template: templates.recipe
- id: soup
title: soup
namespace: true
template: templates.recipe
```
The schema helps me keep my recipes organized (and also automatically applies the template note). You can see that I have my recipes organized under `bowls` for stuff like rice and pasta dishes, `baked` for bread, pies and anything else where you bake everything, `dessert` and `soup` which are self descriptive, and `misc` which holds anything else like salad toppings.
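With this schema, adding a new recipe is just a matter of creating a note under the right part of the hierarchy. The note names end up looking something like this (made-up examples):
```
recipes.bowls.fried-rice.md
recipes.soup.red-lentil.md
recipes.baked.banana-bread.md
```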
## Publishing
I publish my recipes online, which makes it very easy to pull up a recipe when I'm cooking or at the grocery store.
=> https://bgenc.net/recipes/ recipes online
I use a self-hosted setup, so all I have to do is just run the Dendron CLI to build the site. To automate this process, I set up some VSCode tasks to build and publish the site.
```json
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build site",
"type": "shell",
"command": "dendron publish export",
"options": {
"cwd": "${workspaceFolder}"
}
},
{
"label": "publish site",
"type": "shell",
"command": "rsync -av .next/out/ /var/www/recipes/",
"options": {
"cwd": "${workspaceFolder}"
},
"dependsOn": ["build site"],
"problemMatcher": []
},
]
}
```
I think before running these tasks, you first have to run `dendron publish init && dendron publish build` once.
The first task builds the site using Dendron, and then the second task copies the generated static website to where I have it published. I'm running a web server on my desktop so this is just a folder, but `rsync` can also copy things over SSH if you host your site on a different machine. There are also tutorials for things like Github pages or Netlify.
=> https://wiki.dendron.so/notes/x0geoUlKJzmIs4vlmwLn3/ tutorials
Because I'm publishing under a subfolder (`.../recipes`), I also had to set `assetsPrefix` in my `dendron.yml` configuration file.
```yml
publishing:
assetsPrefix: "/recipes"
...
```
## Bonus: What do I cook this week?
My wife and I go shopping once a week, so every week we need to decide what we're going to eat that week. Sometimes it can be hard to pick something though! Luckily, Dendron comes with a `Dendron: Random Note` command which shows you a random note. You can even configure it to only show some notes, which I used so it will only show me recipes.
```yml
commands:
randomNote:
include:
- "recipes"
```
Now when I'm having trouble picking, I can just use this command and get something to cook!

View File

@ -0,0 +1,52 @@
# Mass batch processing on the CLI
2022-03-19 00:00
> This post is day 4 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
Some time ago, I needed to process a lot of video files with vlc. This is usually pretty easy to do: `for file in *.mp4 ; do ffmpeg ... ; done` is about all you need in most cases. However, sometimes the files you are trying to process are in different folders. And sometimes you want to process some files in a folder but not others. That's the exact situation I was in, and I was wondering if I needed to find some graphical application with batch processing capabilities so I could queue up all the processing I needed.
After a bit of thinking though, I realized I could do this very easily with a simple shell script! That shell script lives in my mark-list repository.
=> https://github.com/SeriousBug/mark-list mark-list
The idea is simple: you use the command to mark a bunch of files. Every file you mark is saved into a file for later use.
```bash
$ mark-list my-video.mp4 # Choose a file
Marked 1 file.
$ mark-list *.webm # Choose many files
Marked 3 files.
$ cd Downloads
$ mark-list last.mpg # You can go to other directories and keep marking
```
You can mark a single file, or a bunch of files, or even navigate to other directories and mark files there.
Once you are done marking, you can recall what you marked with the same tool:
```bash
$ mark-list --list
/home/kaan/my-video.mp4
/home/kaan/part-1.webm
/home/kaan/part-2.webm
/home/kaan/part-3.webm
/home/kaan/Downloads/last.mpg
```
You can then use this in the command line. For example, I was trying to convert everything to `mkv` files.
```bash
for file in `mark-list --list` ; do ffmpeg -i "${file}" "${file}.mkv" ; done
```
It works! After you are done with it, you then need to clear out your marks:
```
mark-list --clear
```
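One caveat with the `for` loop above: the backtick expansion splits on whitespace, so it will break if any of the marked files have spaces in their names. In that case something like this (an untested sketch) should be safer:
```bash
mark-list --list | while IFS= read -r file ; do ffmpeg -i "$file" "${file}.mkv" ; done
```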
Hopefully this will be useful for someone else as well. It does make it a lot easier to just queue up a lot of videos, and convert all of them overnight.

63
output/posts/mpv.gmi Normal file
View File

@ -0,0 +1,63 @@
# Motion Interpolation, 24 FPS to 60 FPS with mpv, VapourSynth and MVTools
2015-07-18 00:00
Watching videos at 60 FPS is great. It makes the video significantly smoother and much more enjoyable. Sadly, lots of movies and TV shows are still at 24 FPS. However, I recently discovered that it is actually possible to interpolate the extra frames by using motion interpolation, and convert a video from 24 FPS to 60 FPS in real time. While it is far from perfect, I think the visual artifacts are a reasonable tradeoff for high framerate.
Firstly, what we need is mpv with VapourSynth enabled, and MVTools plugin for VapourSynth. VapourSynth must be enabled while compiling mpv. I adopted an AUR package mpv-vapoursynth which you can use if you are on Arch. Otherwise, all you need to do is use `--enable-vapoursynth` flag when doing `./waf --configure`. They explain the compilation on their repository, so look into there if you are compiling yourself.
=> https://aur4.archlinux.org/packages/mpv-vapoursynth/ mpv-vapoursynth
=> https://github.com/mpv-player/mpv repository
After that, we need MVTools plugin for VapourSynth. This is available on Arch via vapoursynth-plugin-mvtools, otherwise you can find their repository here. There is also a PPA for Ubuntu where you can find `vapoursynth-extra-plugins`, but I haven't used it myself so I can't comment on it.
=> https://www.archlinux.org/packages/community/x86_64/vapoursynth-plugin-mvtools/ vapoursynth-plugin-mvtools
=> https://github.com/dubhater/vapoursynth-mvtools here
=> https://launchpad.net/~djcj/+archive/ubuntu/vapoursynth PPA for Ubuntu
After both of these are enabled, we need a script to use MVTools from VapourSynth. There is one written by Niklas Haas, which you can find here as mvtools.vpy. Personally, I tweaked the block sizes and precision to my liking, as well as removing the resolution limit he added. I'll put the modified version here:
=> https://github.com/haasn/gentoo-conf/blob/master/home/nand/.mpv/filters/mvtools.vpy mvtools.vpy
```python
# vim: set ft=python:
import vapoursynth as vs
core = vs.get_core()
clip = video_in
dst_fps = display_fps
# Interpolating to fps higher than 60 is too CPU-expensive, smoothmotion can handle the rest.
while (dst_fps > 60):
    dst_fps /= 2
# Skip interpolation for 60 Hz content
if not (container_fps > 59):
    src_fps_num = int(container_fps * 1e8)
    src_fps_den = int(1e8)
    dst_fps_num = int(dst_fps * 1e4)
    dst_fps_den = int(1e4)
    # Needed because clip FPS is missing
    clip = core.std.AssumeFPS(clip, fpsnum = src_fps_num, fpsden = src_fps_den)
    print("Reflowing from ",src_fps_num/src_fps_den," fps to ",dst_fps_num/dst_fps_den," fps.")
    sup = core.mv.Super(clip, pel=1, hpad=8, vpad=8)
    bvec = core.mv.Analyse(sup, blksize=8, isb=True , chroma=True, search=3, searchparam=1)
    fvec = core.mv.Analyse(sup, blksize=8, isb=False, chroma=True, search=3, searchparam=1)
    clip = core.mv.BlockFPS(clip, sup, bvec, fvec, num=dst_fps_num, den=dst_fps_den, mode=3, thscd2=12)
clip.set_output()
```
At this point, you should be able to try this out as suggested in the script. To set this up more permanently, I'd suggest placing this script as `~/.config/mpv/mvtools.vpy`, and also writing the following as `~/.config/mpv/mpv.conf`:
```
hwdec=no
vf=vapoursynth=~/.config/mpv/mvtools.vpy
```
Now, whenever you open mpv, it will always use motion interpolation.
The result is fairly good. I noticed some significant artifacts while watching anime, but it works well with movies. I'm guessing that it is harder to track the motion in animations since they are generally exaggerated.
One thing to keep in mind, however, is performance. With `pel=2`, viewing a 1080p video results in around 90% CPU usage across all cores and 1.6 GB of RAM on my Intel i7 4700MQ. With `pel=1`, CPU usage goes down to about 60% per core. This process is very heavy on the processor, and you may have trouble unless you have a fast CPU.

View File

@ -0,0 +1,27 @@
# My response to Aurynn Shaw's "Contempt Culture" post
2022-03-27 00:00
> This post is day 6 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I recently came across Aurynn Shaw's article on "Contempt Culture". I'm a bit late to the party, but I wanted to talk about this too.
=> https://blog.aurynn.com/2015/12/16-contempt-culture/ Aurynn Shaw's article on "Contempt Culture"
Aurynn's article talks about how some programming languages are considered inferior, and programmers using these languages are considered less competent. It's a good article, and you should take a look at it if you haven't.
## my thoughts
One thing I've come to realize over the years is that there are really no "bad programming languages". Ignoring esolangs like brainfuck which are not really meant to be used for anything serious, most programming languages are designed to fit a niche. I'm using the term like it's used in ecology: every programming language has a place in the ecosystem of technology and programming.
PHP is bad? PHP certainly has its drawbacks, but it also has its advantages. "Drop these files into a folder and it works" is an amazing way to get started programming. It's also a great way to inject a bit of dynamic content into otherwise static pages. In fact, it's a simpler and more straightforward solution than building a REST API and a web app where you have to re-invent server side rendering just to get back to where PHP already was!
That's not to say PHP is perfect or the best language to use. It's a language I personally don't like. But that doesn't make it a bad or "stupid" programming language. At worst it's a programming language that doesn't fit my needs. If I extrapolate that and say that PHP is a bad language, that would instead show my ego. Do I really think I'm so great that anything I don't like is just immediately bad? Something Aurynn said resonates with me here:
> It didn't matter that it was (and remains) difficult to read, it was that we
> were better for using it.
I just want to conclude this with one thing: next time you think a programming language or tool or whatever is bad, think to yourself whether that's because it doesn't feel cool or because you saw others making fun of it, or because you actually evaluated the pros and cons and came up with a calculated decision.

19
output/posts/pass.gmi Normal file
View File

@ -0,0 +1,19 @@
# Switching to pass
2015-03-30 00:00
For some time, I used LastPass to store my passwords. While LastPass works well, it doesn't fit into the keyboard driven setup I have. I had been looking into alternatives for a while; I looked into KeePassX, but just like LastPass, it doesn't give me any way to set up keyboard shortcuts. I recently came across pass, on the other hand, and it provides everything I want.
=> http://www.passwordstore.org/ pass
Pass uses GPG keys to encrypt the passwords, and git to keep revisions and backups. It integrates well with the shell, and there is a dmenu script, a Firefox plugin and an Android app. All the passwords are just GPG encrypted files stored in folders, so you don't need anything special to work with them.
=> /img/passmenu.png A terminal window with the command "pass ls archlinux.org". The output lists "SeriousBug@Gmail.com" and "SeriousBug". Above the terminal is a bar, with "archlin" typed on the left, and the option "archlinux.org/SeriousBug@Gmail.com" displayed on the right.
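Day-to-day use is just a few commands. Roughly, it looks like this (the GPG key ID here is made up):
```bash
pass init "kaan@example.com"           # create the store, encrypted to your GPG key
pass git init                          # keep revisions with git
pass insert archlinux.org/SeriousBug   # add a new password
pass -c archlinux.org/SeriousBug       # copy it to the clipboard
```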
So first, I needed to migrate my passwords from LastPass to pass. The website lists some scripts for migration, but sadly I missed that when I first looked at the page. So I decided to write a Python script to handle the migration myself. It inserts all passwords in `domain/username` format, and if there is any extra data written, it is added after the password as well. Secure notes are placed into their own folder, and any "Generated Password for ..." entries are skipped. If you're migrating from LastPass to pass, feel free to give it a try. If you are taking an export from their website however, do make sure that there is no whitespace before or after the csv.
=> https://gist.github.com/SeriousBug/e9f33873d10ad944cbe6 python script to handle the migration
=> /img/password_store.png An Android phone screenshot. A search bar at the top displays "archlin" typed in, and below the search bar the options "archlinux.org" and "wiki.archlinux.org" are listed.
I certainly recommend trying out pass. It works very well, and it fits in with the unix philosophy.

116
output/posts/raid.gmi Normal file
View File

@ -0,0 +1,116 @@
# My local data storage setup
2022-03-10 00:00
Recently, I've needed a bit more storage. In the past I've relied on Google Drive, but if you need a lot of space Google Drive becomes prohibitively expensive, and the largest option available is only 2 TB. For the amount of storage I'm looking for, you're paying around $960 a year with B2 and a whopping $4000 a year with S3.
Luckily in reality, the cost of storage per GB has been coming down steadily. Large hard drives are cheap to come by, and while these drives are not incredibly fast, they are much faster than the speed of my internet connection. Hard drives it is then!
While I could get a single very large hard drive, it's generally a better idea to get multiple smaller hard drives. That's because these drives often offer a better $/GB rate, and also because it allows us to mitigate the risk of data loss. So after a bit of searching, I found these "Seagate Barracuda Compute 4TB" drives. You can find them on Amazon or BestBuy.
=> https://www.amazon.com/gp/product/B07D9C7SQH/ Amazon
=> https://www.bestbuy.com/site/seagate-barracuda-4tb-internal-sata-hard-drive-for-desktops/6387158.p?skuId=6387158 BestBuy
These hard drives are available for $420, plus a bit more for SATA cables. Looking at Backblaze Hard Drive Stats, I think it's fair to assume these drives will last at least 5 years. Dividing the cost by the expected lifetime, that gets me $84 per year, far below what the cloud storage costs! It's of course not as reliable, and it requires maintenance on my end, but the difference in price is just too large to ignore.
=> https://www.backblaze.com/blog/backblaze-drive-stats-for-2021/ Backblaze Hard Drive Stats
## Setup
I decided to set this all up inside my desktop computer. I have a large case so fitting all the hard drives in is not a big problem, and my motherboard does support 6 SATA drives (in addition to the NVMe that I'm booting off of). I also run Linux on my desktop computer, so I've got all the required software available.
For the software side of things, I decided to go with `mdadm` and `ext4`. There are also other options available like ZFS (not included in the linux kernel) or btrfs (raid-5 and raid-6 are known to be unreliable), but this was the setup I found the most comfortable and easy to understand. How it works is that `mdadm` combines the disks and presents them as a single block device, then `ext4` formats and uses that block device the same way you would use any regular drive.
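Just to give an idea of what that looks like, here's a rough sketch (assuming a RAID5 array over four drives; follow the wiki below for the real steps):
```sh
# Combine four disks into a single RAID5 block device.
mdadm --create --verbose /dev/md0 --level=5 --raid-devices=4 /dev/sdb /dev/sdc /dev/sdd /dev/sde
# Then format and mount the array like any other drive.
mkfs.ext4 /dev/md0
mount /dev/md0 /mnt/storage
```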
### Steps
I was originally planning to write the steps I followed here, but in truth I just followed whatever the ArchLinux wiki told me. So I'll just recommend you follow that as well.
=> https://wiki.archlinux.org/title/RAID#Installation ArchLinux wiki
The only thing I'll warn you about is that the wiki doesn't clearly note just how long this process takes. It took almost a week for the array to build, and until the build is complete the array runs at reduced performance. Be patient, and just give it some time to finish. As a reminder, you can always check the build status with `cat /proc/mdstat`.
## Preventative maintenance
Hard drives have a tendency to fail, and because RAID arrays are resilient, the failures can go unnoticed. You **need** to regularly check that the array is okay. Unfortunately, while there are quite a few resources online on how to set up RAID, very few of them actually talk about how to set up scrubs (full scans to look for errors) and error monitoring.
For my setup, I decided to use systemd to check and report issues. For this, I first set up two timers: one that checks if there are any reported errors on the RAID array, and another that scrubs the RAID array. Systemd timers come in two parts, a service file and a timer file, so here are all the files.
- `array-scrub.service`
```toml
[Unit]
Description=Scrub the disk array
After=multi-user.target
OnFailure=report-failure-email@array-scrub.service

[Service]
Type=oneshot
User=root
ExecStart=bash -c '/usr/bin/echo check > /sys/block/md127/md/sync_action'

[Install]
WantedBy=multi-user.target
```
- `array-scrub.timer`
```toml
[Unit]
Description=Periodically scrub the array.
[Timer]
OnCalendar=Sat *-*-* 05:00:00
[Install]
WantedBy=timers.target
```
The timer above is the scrub operation, it tells RAID to scan the drives for errors. It actually takes up to a couple days in my experience for the scan to complete, so I run it once a week.
- `array-report.service`
```toml
[Unit]
Description=Check raid array errors that were found during a scrub or normal operation and report them.
After=multi-user.target
OnFailure=report-failure-email@array-report.service

[Service]
Type=oneshot
ExecStart=/usr/bin/mdadm -D /dev/md127

[Install]
WantedBy=multi-user.target
```
- `array-report.timer`
```toml
[Unit]
Description=Periodically report any issues in the array.
[Timer]
OnCalendar=daily
[Install]
WantedBy=timers.target
```
And this timer above checks the RAID array status to see if there were any errors found. This timer runs much more often (once a day), because it's instant, and also because RAID can find errors during regular operation even when you are not actively running a scan.
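For these to actually run on schedule, the timers need to be enabled and started:
```sh
systemctl enable --now array-scrub.timer array-report.timer
```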
### Error reporting
Another important thing here is this line in the service file:
```toml
OnFailure=report-failure-email@array-report.service
```
The automated checks are of no use if I don't know when something actually fails. Luckily, systemd can run a service when another service fails, so I'm using this to report failures to myself. Here's what the service file looks like:
- `report-failure-email@.service`
```toml
[Unit]
Description=status email for %i to user

[Service]
Type=oneshot
ExecStart=/usr/local/bin/systemd-email address %i
User=root
```
- `/usr/local/bin/systemd-email`
```sh
#!/bin/sh
/usr/bin/sendmail -t <<ERRMAIL
To: homelab@bgenc.net
From: systemd <root@$HOSTNAME>
Subject: Failure on $2
Content-Transfer-Encoding: 8bit
Content-Type: text/plain; charset=UTF-8
$(systemctl status --lines 100 --no-pager "$2")
ERRMAIL
```
The service just runs this shell script, which is a thin wrapper around sendmail. The `%i` in the service is the part after the `@` when you use the service: you can see that the `OnFailure` hook puts `array-report` after the `@`, which gets passed to the email service, which in turn passes it on to the mail script.
To send emails, you also need to set up `sendmail`. I decided to install msmtp, and set it up to use my GMail account to send me an email.
=> https://wiki.archlinux.org/title/Msmtp msmtp
To test if the error reporting works, edit `array-report.service` and change the `ExecStart` line to `ExecStart=false`. Then run the report service with `systemctl start array-report.service`. You should now get an email letting you know that the `array-report` service failed, with the last 100 lines of the service status attached.

158
output/posts/rss.xml Normal file
View File

@ -0,0 +1,158 @@
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>Kaan Barmore-Genç - posts</title>
<link>gemini://gemini.bgenc.net/posts</link>
<description>Recent content in posts on Kaan Barmore-Genç</description>
<generator>gmnhg</generator>
<language>en-us</language>
<copyright>Contents are licensed under CC 4.0 unless specified otherwise.</copyright>
<lastBuildDate>Sun, 10 Apr 2022 20:41:59 -0400</lastBuildDate>
<atom:link href="gemini://gemini.bgenc.net/posts/rss.xml" rel="self" type="application/rss+xml" />
<item>
<title>Managing my recipes with Dendron</title>
<link>gemini://gemini.bgenc.net/posts/managing-my-recipes-with-dendron.gmi</link>
<pubDate>Mon, 04 Apr 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/managing-my-recipes-with-dendron.gmi</guid>
<description></description>
</item>
<item>
<title>Black Crown Initiate</title>
<link>gemini://gemini.bgenc.net/posts/black-crown-initiate.gmi</link>
<pubDate>Sat, 02 Apr 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/black-crown-initiate.gmi</guid>
<description></description>
</item>
<item>
<title>An introduction to Bulgur Cloud - simple self hosted cloud storage</title>
<link>gemini://gemini.bgenc.net/posts/bulgur-cloud-intro.gmi</link>
<pubDate>Tue, 29 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/bulgur-cloud-intro.gmi</guid>
<description></description>
</item>
<item>
<title>Do kids not know computers now?</title>
<link>gemini://gemini.bgenc.net/posts/do-kids-know-computers-today.gmi</link>
<pubDate>Mon, 28 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/do-kids-know-computers-today.gmi</guid>
<description></description>
</item>
<item>
<title>My response to Aurynn Shaw&#39;s &#34;Contempt Culture&#34; post</title>
<link>gemini://gemini.bgenc.net/posts/my-response-to-contempt-culture.gmi</link>
<pubDate>Sun, 27 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/my-response-to-contempt-culture.gmi</guid>
<description></description>
</item>
<item>
<title>Solving `app_data` or `ReqData` missing in requests for actix-web</title>
<link>gemini://gemini.bgenc.net/posts/actix-web-import-issue.gmi</link>
<pubDate>Sat, 26 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/actix-web-import-issue.gmi</guid>
<description></description>
</item>
<item>
<title>Mass batch processing on the CLI</title>
<link>gemini://gemini.bgenc.net/posts/mass-batch-processing-on-the-CLI.gmi</link>
<pubDate>Sat, 19 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/mass-batch-processing-on-the-CLI.gmi</guid>
<description></description>
</item>
<item>
<title>Running graphical user services with systemd</title>
<link>gemini://gemini.bgenc.net/posts/running-graphical-user-services-with-systemd.gmi</link>
<pubDate>Fri, 18 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/running-graphical-user-services-with-systemd.gmi</guid>
<description></description>
</item>
<item>
<title>State of Rust GUIs</title>
<link>gemini://gemini.bgenc.net/posts/state-of-rust-GUIs.gmi</link>
<pubDate>Thu, 17 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/state-of-rust-GUIs.gmi</guid>
<description></description>
</item>
<item>
<title>A little type system trick in Rust</title>
<link>gemini://gemini.bgenc.net/posts/rust-typesystem-tricks.gmi</link>
<pubDate>Tue, 15 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/rust-typesystem-tricks.gmi</guid>
<description></description>
</item>
<item>
<title>Getting Deus Ex GOTY Edition running on Linux</title>
<link>gemini://gemini.bgenc.net/posts/getting-deus-ex-running-on-linux.gmi</link>
<pubDate>Sat, 12 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/getting-deus-ex-running-on-linux.gmi</guid>
<description></description>
</item>
<item>
<title>My local data storage setup</title>
<link>gemini://gemini.bgenc.net/posts/raid.gmi</link>
<pubDate>Thu, 10 Mar 2022 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/raid.gmi</guid>
<description></description>
</item>
<item>
<title>Emacs as an operating system</title>
<link>gemini://gemini.bgenc.net/posts/emacs-as-an-os.gmi</link>
<pubDate>Thu, 14 Apr 2016 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/emacs-as-an-os.gmi</guid>
<description></description>
</item>
<item>
<title>Emacs and extensibility</title>
<link>gemini://gemini.bgenc.net/posts/customizing-emacs.gmi</link>
<pubDate>Tue, 06 Oct 2015 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/customizing-emacs.gmi</guid>
<description></description>
</item>
<item>
<title>Motion Interpolation, 24 FPS to 60 FPS with mpv, VapourSynth and MVTools</title>
<link>gemini://gemini.bgenc.net/posts/mpv.gmi</link>
<pubDate>Sat, 18 Jul 2015 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/mpv.gmi</guid>
<description></description>
</item>
<item>
<title>Taking Backups with Duplicity</title>
<link>gemini://gemini.bgenc.net/posts/duplicity.gmi</link>
<pubDate>Sat, 16 May 2015 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/duplicity.gmi</guid>
<description></description>
</item>
<item>
<title>Writing a Program in Bash</title>
<link>gemini://gemini.bgenc.net/posts/bash.gmi</link>
<pubDate>Sun, 12 Apr 2015 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/bash.gmi</guid>
<description></description>
</item>
<item>
<title>Switching to pass</title>
<link>gemini://gemini.bgenc.net/posts/pass.gmi</link>
<pubDate>Mon, 30 Mar 2015 00:00:00 +0000</pubDate>
<guid>gemini://gemini.bgenc.net/posts/pass.gmi</guid>
<description></description>
</item>
</channel>
</rss>

View File

@ -0,0 +1,78 @@
# Running graphical user services with systemd
2022-03-18 00:00
> This post is day 3 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
I've recently switched from KDE Plasma to sway as my window manager. I had a problem with the change though: the amazing kdeconnect service wasn't working!
My first attempt at fixing this was to just add a line to my sway config to launch it along with sway.
```
exec /usr/lib/kdeconnectd
```
Looks simple enough. But for some reason, `kdeconnectd` would just disappear after a while. It would appear to run at startup, and then an hour or two later I would pull up the kdeconnect app on my phone and it would tell me that my computer was disconnected.
The biggest issue here was that I had no way to see why kdeconnect had failed. In comes systemd to save the day. Systemd is a service manager, so it will actually maintain the logs for these services. That means if kdeconnect is crashing, I can check the logs for kdeconnect to see why it crashed. I can also configure it to auto-restart after a crash if I want to.
To launch graphical applications with systemd though, you need to pass the appropriate environment variables to it so it knows how to launch new windows. I added this line to my sway config to do exactly that.
```
# Pass all variables to dbus & systemd to run graphical user services
exec dbus-update-activation-environment --all --systemd
```
Next, we need to write a service file to run the application. This is easier than it sounds; here's the service file I wrote for kdeconnect:
```
[Unit]
Description=Run kdeconnectd.
After=graphical-session.target
StartLimitIntervalSec=600
StartLimitBurst=5
[Service]
Type=simple
ExecStart=/usr/lib/kdeconnectd
Restart=on-failure
RestartSec=5s
[Install]
WantedBy=graphical-session.target
```
I saved this as `~/.config/systemd/user/kdeconnectd.service`. Finally, I enabled it for my user with `systemctl --user enable kdeconnectd.service` and then restarted.
The service is configured to automatically restart on failure, but not if it failed more than 5 times in the last 10 minutes. Systemd also waits 5 seconds before trying to restart the failed service. This way if it crashes for some reason, it will restart. But if it keeps crashing rapidly, it won't keep trying to restart, which could take up too many system resources.
I can now check how the service is doing with systemd!
```
Warning: The unit file, source configuration file or drop-ins of kdeconnectd.service changed on disk. Run 'systemctl --user daemon-reload>
● kdeconnectd.service - Run kdeconnectd.
Loaded: loaded (/home/kaan/.config/systemd/user/kdeconnectd.service; enabled; vendor preset: enabled)
Active: active (running) since Thu 2022-03-17 14:18:15 EDT; 1h 46min ago
Main PID: 2188363 (kdeconnectd)
Tasks: 6 (limit: 77007)
Memory: 24.2M
CPU: 2.440s
CGroup: /user.slice/user-1000.slice/user@1000.service/app.slice/kdeconnectd.service
└─2188363 /usr/lib/kdeconnectd
Mar 17 14:20:58 eclipse systemd[817]: /home/kaan/.config/systemd/user/kdeconnectd.service:6: Unknown key name 'type' in section 'Service'>
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: QObject::connect(KWayland::Client::Registry, Unknown): invalid nullptr parameter
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: No Primary Battery detected on this system. This may be a bug.
Mar 17 15:16:11 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: Total quantity of batteries found: 0
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: QObject::connect(KWayland::Client::Registry, Unknown): invalid nullptr parameter
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: No Primary Battery detected on this system. This may be a bug.
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: kdeconnect.plugin.battery: Total quantity of batteries found: 0
Mar 17 15:23:26 eclipse kdeconnectd[2188363]: QMetaObject::invokeMethod: No such method KIO::StoredTransferJob::slotDataReqFromDevice()
Mar 17 15:24:35 eclipse kdeconnectd[2188363]: QMetaObject::invokeMethod: No such method KIO::StoredTransferJob::slotDataReqFromDevice()
Mar 17 15:57:29 eclipse systemd[817]: /home/kaan/.config/systemd/user/kdeconnectd.service:9: Unknown key name 'type' in section 'Service'>
```
A bunch of warnings so far, but no crashes yet. But if it does crash again, I'll finally know why.
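And since systemd keeps the logs, pulling up just the log output for the service is one command away:
```
journalctl --user -u kdeconnectd.service
```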

View File

@ -0,0 +1,37 @@
# A little type system trick in Rust
2022-03-15 00:00
> This post is day 1 of me taking part in the
> #100DaysToOffload challenge.
=> https://100daystooffload.com/ #100DaysToOffload
While working on a small project recently, I ended up writing this type in Rust.
```rust
type ImageData = Arc<Mutex<Option<ImageBuffer<Rgba<u8>, Vec<u8>>>>>;
```
Even though I wrote it myself, it actually took me a bit after writing it to figure out what this type was doing so I wanted to write about it.
Let me start from the outside in. The first type we have is `Arc`. `Arc` stands for "atomic reference counting". Reference counting is a method to handle ownership of the data, or in other words to figure out when the data needs to be freed. Garbage collected languages do this transparently in the background, but in Rust we explicitly need to state that we want it. Atomic means this is done using atomic operations, so it is thread safe. In my case, I needed this because this data was going to be shared between multiple threads, and I didn't know exactly when I would be "done" with the data.
=> https://en.wikipedia.org/wiki/Reference_counting Reference counting
=> https://en.wikipedia.org/wiki/Linearizability#Primitive_atomic_instructions atomic operations
The next type is `Mutex`, which means mutual exclusion or locking. Locks are used to restrict access to data to a single thread at a time. That means whatever type is inside of this is not thread safe, so I'm using the lock to protect it. Which is true!
=> https://en.wikipedia.org/wiki/Lock_(computer_science) mutual exclusion
The type after that is `Option`. This basically means "nullable", there may or may not be a thing inside this. The interesting thing here is that this is a sum type, so Rust helps remind us that this is nullable without introducing a nullability concept to the language. It's just part of the type system!
=> https://en.wikipedia.org/wiki/Tagged_union sum type
Then we have `ImageBuffer`, a type from the popular image crate. Not much to talk about with this, that's the data I wanted to store.
=> https://docs.rs/image/latest/image/index.html image crate
The next thing that *is* interesting is the `Rgba<u8>` and `Vec<u8>` inside the image buffer. What that means (and I'm speculating here because I'm lazy/too busy to check), is that `Rgba` is just a basic wrapper type (or a "newtype"). It makes the compiler enforce the type of the image data that's stored in this image buffer, so the user doesn't mix up different data types. Similar for `Vec<u8>`, (I think) it means that the data inside this buffer is stored in a vector.
Finally, `u8` is probably self descriptive, the pixels and the vector are made out of 8-bit unsigned integers.
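To see how the layers fit together in practice, here's a minimal sketch. I'm substituting a plain `Vec<u8>` for the `ImageBuffer` so it compiles without the image crate:
```rust
use std::sync::{Arc, Mutex};
use std::thread;

// Same shape as ImageData, but with the image-specific types stripped out.
type SharedData = Arc<Mutex<Option<Vec<u8>>>>;

fn main() {
    // Starts out as None; a worker thread will fill it in later.
    let data: SharedData = Arc::new(Mutex::new(None));

    // Arc gives the worker thread its own handle to the same allocation.
    let writer = Arc::clone(&data);
    let handle = thread::spawn(move || {
        // Mutex ensures only one thread touches the Option at a time.
        *writer.lock().unwrap() = Some(vec![255, 0, 0, 255]);
    });
    handle.join().unwrap();

    // Option tells us whether the data has been produced yet.
    if let Some(bytes) = data.lock().unwrap().as_ref() {
        println!("got {} bytes", bytes.len());
    }
}
```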

Some files were not shown because too many files have changed in this diff