"oldie is a python 3 script that finds the file with the latest access time for each package, and displays the list of pkgs you have installed from oldest to latest access time.
This is mainly to help find packages you rarely use."
https://github.com/gostrc/oldie
#! /usr/bin/env python3
import stat
import os
import subprocess
import datetime

# bytes to unicode, for convenience
def b2u(l):
    return map(lambda x: x.decode('utf-8'), l)

# holds 'pkgname': latest_access_time
latest_pkg_access = {}

# iterate per package
for pkg in b2u(subprocess.check_output(["pacman", "-Qq"]).strip().split(b'\n')):
    access_times = []
    # iterate per file in that package
    for pkg_file in b2u(subprocess.check_output(["pacman", "-Qql", pkg]).strip().split(b'\n')):
        try:
            file_stat = os.stat(pkg_file)
        except OSError as e:
            #print(e)
            # symlinks or non existing files..
            continue
        if stat.S_ISREG(file_stat.st_mode):
            access_times.append(file_stat[stat.ST_ATIME])
    # needed check because xorg-font-utils has 0 files...
    if len(access_times):
        latest_pkg_access[pkg] = max(access_times)

# get the length of the longest pkgname, used later for pretty formatting
max_name_len = max(map(len, latest_pkg_access.keys()))

# show pkgs with the oldest "latest access" time first
for pkg in sorted(latest_pkg_access, key=latest_pkg_access.get):
    date = datetime.datetime.fromtimestamp(latest_pkg_access[pkg])
    print('{0:{width}}'.format(pkg, width=max_name_len), date)
Just in case you like to play complete albums at random, assuming each album is in its own directory. It also supports Windows-style playlist paths, since my mp3 player requires those.
#!/usr/bin/env ruby
# a script to generate a random m3u playlist for any type of file

class CreateRandomPlaylist
  def initialize
    @windows = ARGV.include?('--windows')
    @top_level_dir = ARGV[0]
    @playlist_dir = File.dirname(@top_level_dir)
    @albums = Array.new
    setPlaylistFilename()
    setExtensions()
    setActiveDir()
    searchAlbums(File.basename(@top_level_dir))
    createRandomPlaylist()
  end

  # the playlist is saved in the directory above the top level dir.
  def setPlaylistFilename
    date = Time.now.strftime("%d-%m-%Y")
    filename = File.basename(@top_level_dir) + '_playlist' + '_' + date + '.m3u'
    @playlist_filename = File.join(@playlist_dir, filename)
  end

  def setExtensions
    @extensions = Array.new
    ARGV[1].split(',').each { |extension| @extensions << '.' + extension }
  end

  def setActiveDir
    Dir.chdir(@playlist_dir)
  end

  def searchAlbums(base)
    currentdir = File.join(@playlist_dir, base)
    Dir.entries(currentdir).sort.each do |entry|
      next if entry[0] == '.'
      filename = File.join(base, entry)
      if File.directory?(File.join(@playlist_dir, filename))
        searchAlbums(filename)
      elsif @extensions.include?(File.extname(filename))
        @albums << File.dirname(filename)
        break
      end
    end
  end

  def createRandomPlaylist
    @playlist = File.open(@playlist_filename, 'w')
    while @albums.size > 0
      # a range with three dots excludes the end value
      random_album = @albums.delete_at(rand(0...@albums.size))
      puts "Random album: #{random_album}"
      addTracksToPlaylist(random_album)
    end
    @playlist.close()
  end

  private

  def convertToWindows(filename)
    @windows ? filename.gsub('/', '\\') : filename
  end

  def addTracksToPlaylist(album)
    Dir.entries(album).sort.each do |filename|
      if @extensions.include?(File.extname(filename))
        @playlist.puts(convertToWindows(File.join(album, filename)))
      end
    end
  end
end

def exitWithWarning
  puts "Please pass the correct parameters:"
  puts "First argument:  the top level audio directory"
  puts "Second argument: the extensions, separated by commas (,)"
  puts "Third argument:  --windows (optional, to use backslashes: dir\\filename)"
  puts ""
  puts "Example: ./create-random-playlist /mnt/data/Music mp3,flac,ogg --windows"
  exit()
end

if ARGV.size < 2
  puts "Wrong number of arguments passed!"
  exitWithWarning()
elsif !File.directory?(ARGV[0])
  puts "Not a valid directory!"
  exitWithWarning()
elsif ARGV[1].size < 2
  puts "No valid extension!"
  exitWithWarning()
else
  CreateRandomPlaylist.new()
end
So here's a little, dirty bash function that lets you search the web from the CLI.
This is (mostly) for the UK web, as I live in England, but you can easily replace the URLs.
Just add this to your .bashrc.

## Search interfaces

# Google search bash function
google() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.google.co.uk/search?q=$search"; }

# Google images search bash function
images() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.google.co.uk/search?tbm=isch&hl=en&source=hp&biw=1366&bih=679&q=$search"; }

# YouTube search bash function
videos() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.youtube.com/results?search_query=$search"; }

# Google books search bash function
book() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.google.co.uk/search?q=$search&btnG=Search+Books&tbm=bks&tbo=1"; }

# Bing search bash function
bing() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.bing.com/search?q=$search"; }

# DuckDuckGo search bash function
ddg() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://duckduckgo.com/?q=$search"; }

# Wikipedia English search bash function
wiki() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://en.wikipedia.org/w/index.php?search=$search"; }

# Android Play Store search bash function
android() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://play.google.com/store/search?q=$search"; }

# Grooveshark music search bash function
music() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://grooveshark.com/#!/search?q=$search"; }

# Amazon UK search bash function
amazon() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.amazon.co.uk/s/ref=nb_sb_noss_1?url=search-alias%3Daps&field-keywords=$search&x=0&y=0"; }

# Amazon Kindle search bash function
kindle() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.amazon.co.uk/s/ref=nb_sb_noss_1?url=search-alias%3Ddigital-text&field-keywords=$search&x=0&y=0"; }

# Amazon MP3 search bash function
mp3store() { search=""; for term in $*; do search="$search%20$term"; done; xdg-open "http://www.amazon.co.uk/s/ref=nb_sb_ss_i_0_8?url=search-alias%3Ddigital-music&field-keywords=$search&x=0&y=0"; }
Note: The search results will open in your default browser.
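Specifically, xdg-open hands the URL to whatever the desktop considers the default browser; if the wrong one opens, xdg-settings (part of xdg-utils) can show and change it. firefox.desktop below is only an example:

xdg-settings get default-web-browser                  # show the current default
xdg-settings set default-web-browser firefox.desktop  # switch it (any installed browser's .desktop file works)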
P.S. I have a couple more of these, but this should be enough as an example.
Going to use this, thanks (After I remove bing ofc..)
A script I made to easily merge .pacnew files with vimdiff
#!/bin/bash

if ! which locate &> /dev/null
then
    echo "mlocate is required for this script to work!"
    exit 1
fi

if ! which vimdiff &> /dev/null
then
    echo "vim is required for this script to work!"
    exit 1
fi

echo "=============================
==   .pacnew merge script  ==
=============================
==  vim commands:          ==
==  do - Obtain changes    ==
==  zo - Open folded text  ==
==  zc - Close folded text ==
=============================
"

sudo updatedb
files=$(locate ".pacnew" | sed 's/\.pacnew//')

if [ -z "$files" ]
then
    echo "No .pacnew files found!"
    exit 0
else
    echo "$(echo "$files" | wc -l) file(s) found."
    echo "Press enter to start editing"
    echo
    read
fi

for file in $files; do
    sudo vimdiff "$file" "$file.pacnew"
    echo -n "Do you want to delete $file.pacnew ? [y/N] "
    read choice
    if [ "$choice" == 'y' ] || [ "$choice" == 'Y' ]
    then
        echo "Deleting $file.pacnew"
        sudo rm "$file.pacnew"
    else
        echo "Not deleting $file.pacnew"
    fi
done
MatejLach wrote: So here's a little, dirty bash function that lets you search the web from the CLI.
Maybe there's an obvious reason I'm not seeing, but why this when you could just use surfraw?
Registered Linux User 483618
Generate a wallpaper for my multi-monitor (portrait + landscape) set-up.
#!/bin/zsh
# Set the wallpaper directories
WALLPAPERDIR=/usr/share/wallpapers/rotation
LANDDIR=$WALLPAPERDIR/landscape
PORTDIR=$WALLPAPERDIR/portrait
# Pick random landscape and portrait wallpapers.
landscape="$LANDDIR/`ls $LANDDIR | shuf -n1`"
portrait="$PORTDIR/`ls $PORTDIR | shuf -n1`"
# Make the new wallpaper
wall=$(mktemp --suffix=.png)
montage $portrait $landscape -tile 2x1 -geometry "3000x1920+0+0" -mode concatenate $wall
# Set the wallpaper
feh --bg-tile $wall
# Delete the temporary image
rm $wall
MatejLach wrote: So here's a little, dirty bash function that lets you search the web from the CLI.
Maybe there's an obvious reason I'm not seeing, but why this when you could just use surfraw?
Yes, there is (was).
It was already suggested to me to use surfraw.
(Hint: read my post and my second post in this topic carefully :-)
Maybe because I do not want to 'chunk' my system (extra configs etc.) just because I want to do some basic searching from the CLI, using some easy-to-remember keywords, and I want all of this to be easily expandable.
I am not familiar with surfraw anyway, but it seems to me to be just another unnecessary package on my system, that's all.
Last edited by MatejLach (2012-04-22 01:13:40)
MatejLach wrote: Yes, there is (was).
It was already suggested to me to use surfraw. (Hint: read my post and my second post in this topic carefully :-)
Maybe because I do not want to 'chunk' my system (extra configs etc.) just because I want to do some basic searching from the CLI, using some easy-to-remember keywords, and I want all of this to be easily expandable.
I am not familiar with surfraw anyway, but it seems to me to be just another unnecessary package on my system, that's all.
Ah, just a page ago another user recommended surfraw to you--I ought to have read the previous posts. I apologize for the redundancy. Surfraw just needs a .surfraw.conf to configure it, and optionally a bookmarks file if you want to access bookmarks via quick CLI shortcuts. To me, surfraw is indispensable (especially since I can add additional search providers, called "elvi", myself as needed), but to each his own.
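For anyone curious, a rough sketch of that setup (the variable names and the bookmarks path are recalled from surfraw's documentation, so treat them as assumptions and double-check them on your own install):

# ~/.surfraw.conf -- sourced as shell; these variable names are assumptions, verify against surfraw's defaults
SURFRAW_graphical=yes
SURFRAW_graphical_browser=firefox

# bookmarks file (commonly ~/.config/surfraw/bookmarks): one "name url" pair per line, e.g.
#   archbbs https://bbs.archlinux.org

# usage: the sr wrapper plus an elvi name and a query, or a bookmark name
sr google arch linux pacnew
sr archbbs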
MatejLach wrote: Yes, there is (was).
It was already suggested to me to use surfraw. (Hint: read my post and my second post in this topic carefully :-)
Maybe because I do not want to 'chunk' my system (extra configs etc.) just because I want to do some basic searching from the CLI, using some easy-to-remember keywords, and I want all of this to be easily expandable.
I am not familiar with surfraw anyway, but it seems to me to be just another unnecessary package on my system, that's all.
Ah, just a page ago another user recommended surfraw to you--I ought to have read the previous posts. I apologize for the redundancy. Surfraw just needs a .surfraw.conf to configure it, and optionally a bookmarks file if you want to access bookmarks via quick CLI shortcuts. To me, surfraw is indispensable (especially since I can add additional search providers, called "elvi", myself as needed), but to each his own.
Yeah, it certainly looks interesting. I just wasn't aware of it, and I'm not even sure I need it right now, but I completely understand why you suggested it, thanks. :-)
Made a screenshot script inspired by grabup.
#!/bin/sh
cd ~/media/img/scrshot/ && scrot -s && rsync -zr * wallaby.ishpeck.net:www/scrshot/ && echo http://www.ishpeck.net/scrshot/`ls --color=never -t | head -n 1` | xclip
Here it is in action: [screenshot not included]
Naturally, my ssh key is authorized on the target server.
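For completeness, that one-time key setup is just the usual ssh routine; the hostname below is taken from the script above:

ssh-keygen                        # generate a key pair, if you don't already have one
ssh-copy-id wallaby.ishpeck.net   # append the public key to ~/.ssh/authorized_keys on the server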
If I were to ask you a hypothetical question, what would you want it to be about?
Made a pastebin clone.
#!/bin/sh
textfile=`date | sed 's/[ :]/_/g' | sed 's/$/.txt/'`
cd ~/media/txt && xclip -o > $textfile && rsync -zr * wallaby.ishpeck.net:www/txt/ && echo http://www.ishpeck.net/txt/$textfile | xclip
Sends highlighted text to web server and copies URL to clipboard.
aur - It's probably not the best way to write this, but I'm proud of it. It downloads and installs packages from the AUR, checks the latest versions of installed packages, and so on; the help output lists everything. I named it "aur" and added it to my path, so I can easily type "aur install dropbox" to install dropbox, or "aur version" to check the latest version of all of my foreign packages in the AUR.
* Removed the lynx dependency, thanks to rwd.
#!/bin/bash

maketar () {
    tar -xvzf $1.tar.gz
    cd $1
    makepkg -s
    if [ -f $1*.tar.xz ]; then
        echo "sudo pacman -U $1*.tar.xz"
        sudo pacman -U $1*.tar.xz
    else
        echo "Failed somewhere. Invalid PKGBUILD maybe? (Check output above)"
        exit
    fi
    cd ..
    rm -rf $1 $1.tar.gz
}

# Give help info
if [ "$1" = "--help" ]; then
    echo "Install packages from the AUR, check installed and AUR versions of packages"
    echo "Usage: aur [OPTION] [PACKAGE]"
    echo "Example: 'aur version PACKAGE' lists the installed version and the latest version available in the AUR for PACKAGE"
    echo ""
    echo "Options:"
    echo "  installed    Lists installed packages, a PACKAGE is optional"
    echo "  version      Lists installed and latest versions of packages, a PACKAGE is optional"
    echo "  install      Downloads and installs PACKAGE from the AUR"
    echo "  download     Downloads PACKAGE from the AUR, no install"
    exit
fi

# Check the installed and latest version of packages
if [ "$1" = "version" ]; then
    if [ "$2" = "" ]; then
        for p in $(pacman -Qm | awk '{print $1}')
        do
            echo "Processing $p"
            localpackage=$(pacman -Qm | grep $p | head -1 | awk '{print $2}')
            aurpackage=$(curl -s https://aur.archlinux.org/packages/$p/PKGBUILD | grep "pkgver=" | cut -b 8-20)
            if [ "$aurpackage" = "" ]; then
                aurpackage="doesn't exist"
            fi
            echo "local = $localpackage"
            echo "aur = $aurpackage"
        done
    else
        echo "Processing $2"
        if pacman -Qm | awk '{print $1}' | grep -x $2 > /dev/null; then
            localpackage=$(pacman -Qm | grep $2 | head -1 | awk '{print $2}')
        else
            localpackage="not installed"
        fi
        aurpackage=$(curl -s https://aur.archlinux.org/packages/$2/PKGBUILD | grep "pkgver=" | cut -b 8-20)
        if [ "$aurpackage" = "" ]; then
            aurpackage="doesn't exist"
        fi
        echo "local = $localpackage"
        echo "aur = $aurpackage"
    fi
    exit
fi

# Download package
if [ "$1" = "download" ] && [ "$2" != "" ]; then
    if [ -f $2.tar.gz ]; then
        echo "File already exists"
        exit
    fi
    wget -q https://aur.archlinux.org/packages/$2/$2.tar.gz
    if [ "$?" != "0" ]; then
        echo "Package does not exist."
    fi
    if [ -f $2.tar.gz ]; then
        echo "Download of $PWD/$2.tar.gz is complete."
    else
        echo "Error downloading.. try again.."
    fi
    exit
fi

# Check all installed foreign packages, or just one if specified
if [ "$1" = "installed" ]; then
    foreign=$(pacman -Qm)
    if [ "$2" = "" ]; then
        echo "$foreign"
    else
        if echo "$foreign" | awk '{print $1}' | grep -x $2 > /dev/null; then
            echo "$foreign" | grep "^$2 " | awk '{print $1" "$2}'
        else
            echo "Package is not installed."
        fi
    fi
    exit
fi

# Install package, or download and install from the AUR if it doesn't exist locally
if [ "$1" = "install" ] && [ "$2" != "" ]; then
    if [ -f $2.tar.gz ]; then
        maketar $2
    else
        wget -q https://aur.archlinux.org/packages/$2/$2.tar.gz
        if [ "$?" != "0" ]; then
            echo "Package does not exist."
        fi
        if [ -f $2.tar.gz ]; then
            maketar $2
            echo "Installation of $2 complete."
        else
            echo "Error downloading.. try again.."
        fi
    fi
    exit
fi

# Error
echo "Usage: aur [install, download, version, installed] [PACKAGE]"
echo "Try 'aur --help' for more information."
Last edited by guvery (2012-04-26 23:42:41)
It requires lynx to run.
Wouldn't 'curl' be a more logical choice for this? Most systems already have this installed.
guvery wrote: It requires lynx to run.
Wouldn't 'curl' be a more logical choice for this? Most systems already have this installed.
That is much better, thanks! I can now add curl to my skills, instead of lynx.
Last edited by guvery (2012-04-26 23:45:00)
A simple JSON xkcd utility. It returns the image URL of the latest comic, or a random image URL if passed the --random option. There are probably several existing xkcd utilities in this thread; now there is one more. It still needs error-checking code.
#!/usr/bin/env python3
import sys
import json
import random
import urllib.request
f = urllib.request.urlopen('http://xkcd.com/info.0.json').read().decode('utf-8')
js = json.loads(f)
if len(sys.argv) == 2 and sys.argv[1] == '--random':
    idx = random.randint(1, js['num'])
    f = urllib.request.urlopen('http://xkcd.com/%d/info.0.json' % idx).read().decode('utf-8')
    js = json.loads(f)
print(js['img'])
Last edited by mikesd (2012-04-30 02:27:35)
A simple Flash downloader:
#!/bin/bash
# Save location
dir=/home/$USER

flash=$(lsof -n | grep Flash | awk '{print "/proc/" $2 "/fd/" $4}' | sed 's/.$//' | head -n1)

if [ "$flash" = "" ]; then
    echo "[ Error ] No Flash stream found"
    exit
fi

while true; do
    now=$(date +"%H-%M-%S")
    var1=$(md5sum $flash | awk '{print $1}')
    sleep 2s
    var2=$(md5sum $flash | awk '{print $1}')
    if [ $var1 = $var2 ]; then
        echo "[ $now ][$var1] done, moving file to $dir/$var1.flv"
        cp $flash $dir/$var1.flv
        break
    else
        echo "[ $now ][$var1] buffering.."
    fi
done
or here along with some other Linux stuff (in German):
http://echtor.net/index.php?entry=entry120426-213855
Probably been done a thousand times before, but here's a script to change the wallpaper periodically, Windows 7 style. Depends on hsetroot and Python. Thanks to rock turtle for helping me out on one point. If anyone wants any features, send me a PM and there's an off chance I might be able to figure out how to do it.
#!/usr/bin/env python3
import argparse
#import filecmp
import imghdr
import os.path
import random
import time
import subprocess
from os import listdir
from sys import exit, stderr


def main():
    args = argumentProcessing()
    while True:
        images = getImages(args.directory, args.recursive)
        setbg(images)
        time.sleep(args.sleep * 60)


def argumentProcessing():
    arguments = argparse.ArgumentParser(
        description='Use hsetroot to periodically '
                    'change the background')
    arguments.add_argument('-r',
                           '--recursive',
                           action='store_true',
                           help='Search through directories recursively')
    arguments.add_argument('-s',
                           '--sleep',
                           type=int,
                           default=10,
                           metavar='N',
                           help='Time (in minutes) to wait before changing '
                                'backgrounds. Default: 10')
    arguments.add_argument('directory',
                           metavar='Directory',
                           help='Directory containing image files')
    return arguments.parse_args()


def getImages(directory, recursive):
    files = listdir(directory)
    filepaths = list()
    for item in files:
        filepath = directory + '/' + item
        if os.path.isdir(filepath):
            if recursive:
                filepaths.extend(getImages(filepath, recursive))
            continue
        else:
            if imghdr.what(filepath) is None:
                continue
            filepaths.append(filepath)
    #print(filepaths)
    return filepaths


def setbg(images):
    try:
        imgnum = random.randint(0, len(images) - 1)
    except ValueError:
        print("No images found.", file=stderr)
        exit()
    command = ['hsetroot', '-fill', images[imgnum]]
    subprocess.call(command)


if __name__ == '__main__':
    main()
Haven't tested the recursive feature yet. I'll get right on that.
Recursive didn't work, but I fixed it.
Last edited by Bellum (2012-05-03 05:23:46)
I needed a fast scanner to periodically inventory my subnet and produce concise, readable output without any information I don't need, so I wrote this Python script.
I'm checking TCP 9100 because I'd like to eventually flag printers in the output (showing the printer model would be a nice bonus).
If anyone can provide a better regular expression for the IP range, please enlighten me. Currently, I can't match forms such as 192.168.1.1/24.
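One incremental improvement (a sketch, not a full nmap target-spec parser) is to widen the octet character class and allow an optional /0-32 suffix. Checked here with grep -E against a few sample inputs; since it only uses plain groups, the same expression should drop into the re.compile() call in the script below:

pattern='^([0-9*-]{1,7}\.){3}[0-9*-]{1,7}(/([0-9]|[12][0-9]|3[0-2]))?$'
for range in 192.168.1.1 192.168.1.0/24 192.168.1.1-254 10.0.0.1/40; do
    if printf '%s\n' "$range" | grep -Eq "$pattern"; then
        echo "match:    $range"
    else
        echo "no match: $range"   # 10.0.0.1/40 fails because the prefix length is out of range
    fi
done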
I'd also like a less dirty way to pull the NetBIOS name from nmap's output, but since it comes from an NSE script, it isn't in the nice XML structure.
Also, any silliness in my python coding is fair game for critique. I'm still getting my feet wet in the language (and am liking it so far).
Gosh, that was a mouthful. Anyway, the script works fine enough (for its purpose) to share.
I have cron dumping output to a file on my web server.
#!/usr/bin/env python2
# deps: nmap, sudo

# import modules
import re
import subprocess
import sys
import xml.dom.minidom


# class to store important scan information
class machine():
    ip = ""
    mac = ""
    netbios = ""

    def __init__(self, ip, mac, netbios):
        self.ip = ip
        self.mac = mac
        self.netbios = netbios


# check for required argument ip_range
if len(sys.argv) > 1:
    ip_range = sys.argv[1]
else:
    sys.exit("Please give an IP address range in nmap format as an argument.")

# check that ip_range is formatted correctly for nmap
ip_check = re.compile('^(?:[0-9*-]{1,3}\.){3}[0-9*-]{1,3}$')  # this doesn't work well or cover all valid syntax, FIX IT!
if ip_check.match(ip_range) == None:
    sys.exit("Argument must be an IP address range in nmap format.")

# run the nmap scan
nmap_scan_xml = subprocess.check_output(["sudo", "nmap", "-oX", "-", "-Pn", "--script", "nbstat.nse", "-sU", "-sS", "-p137,9100", ip_range])

# parse the results
nmap_scan_dom = xml.dom.minidom.parseString(nmap_scan_xml)
hosts = nmap_scan_dom.getElementsByTagName("host")
machines = []
for host in hosts:
    ip = host.getElementsByTagName("address")[0].getAttribute("addr")
    if len(host.getElementsByTagName("address")) > 1:
        mac = host.getElementsByTagName("address")[1].getAttribute("addr")
    else:
        mac = ""
    if len(host.getElementsByTagName("script")) > 0:
        nbstat = host.getElementsByTagName("script")[0].getAttribute("output")
        netbios = nbstat.split()[1 + nbstat.split().index("name:")].split(',')[0]
    else:
        netbios = ""
    machines.append(machine(ip, mac, netbios))

# output results
for machine in machines:
    print machine.ip,
    print machine.mac,
    print machine.netbios
I think you can drop 'grep' and use just awk.
Can you post a sample output of 'wget --max-redirect 0 "$@" 2>&1'?
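The wget script being replied to isn't quoted on this page, but the general point is that awk can do the pattern match itself, so a grep-plus-awk pipeline collapses into a single awk call. The Flash-downloader one-liner from earlier in the thread makes a handy illustration:

# before: grep filters, awk formats
lsof -n | grep Flash | awk '{print "/proc/" $2 "/fd/" $4}'

# after: awk does both
lsof -n | awk '/Flash/ {print "/proc/" $2 "/fd/" $4}'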
These are two handy scripts I always use when generating or burning ISO files. The MD5 checksums let you verify that the generated ISO matches the media in your drive, or that the burned media is identical to the ISO. I wrote them to mimic the functionality of ImgBurn under Windows.
I got the blocksize and blockcount handling from another shell script I found on the web; unfortunately I no longer remember where.
rip2ISO generates an ISO from the media in your drive:
#!/bin/bash
# Setting all required variables
dvd_device="/dev/sr0"
iso="$1"
echo "Start ripping ISO file $1 from CD/DVD"
blocksize=$(isoinfo -d -i $dvd_device | grep "^Logical block size is:" | cut -d " " -f 5)
blockcount=$(isoinfo -d -i $dvd_device | grep "^Volume size is:" | cut -d " " -f 4)
dd if=$dvd_device of=$iso bs=$blocksize count=$blockcount
echo "Verifying ripped ISO"
# Checksum the media again with the same blocksize and count, then compare with the ISO
md5cd=`dd if=$dvd_device bs=$blocksize count=$blockcount | md5sum` >&2
echo $(echo $md5cd | cut -d " " -f 1) " $dvd_device"
md5sum $iso
burnISO works the other way around: an ISO is burned to disc and verified afterwards:
#!/bin/bash
# Setting all required variables
dvd_device="/dev/sr0"
iso="$1"
echo "Start burning ISO file $1 to CD/DVD"
cdrecord speed=4 "$iso"
echo "Cycling CD/DVD tray to (re)read burned media"
sleep 5
echo "Opening tray"
eject -T $dvd_device
sleep 10
echo "Closing tray"
eject -T $dvd_device
sleep 10
echo "Verifying burned ISO"
blocksize=$(isoinfo -d -i $dvd_device | grep "^Logical block size is:" | cut -d " " -f 5)
blockcount=$(isoinfo -d -i $dvd_device| grep "^Volume size is:" | cut -d " " -f 4)
dd if=$dvd_device of=$iso bs=$blocksize count=$blockcount
# Generating temporary CD/DVD image
md5cd=`dd if=$dvd_device bs=$blocksize count=$blockcount | md5sum` >&2
echo $(echo $md5cd | cut -d " " -f 1) " $dvd_device"
md5sum $iso
My archlinux x86_64 host:
AMD E350 (2x1.6GHz) / 8GB DDR3 RAM / GeForce 9500GT (passive) / Arch running from 16GB USB Stick
Out of curiosity, is running just 'md5sum /dev/cdrom' slower or faster than your method?
Out of curiosity, is running just 'md5sum /dev/cdrom' slower or faster than your method?
The major issue is that this will generate a different checksum than the ISO, at least as far as I can tell from my experiments. You need to use the exact blocksize and block count to get identical checksums. The same goes for running dd if=/dev/cdrom and piping it to md5sum without those parameters; for me, only the approach with bs and count as dd parameters worked.
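Condensed down, the check the scripts above perform is essentially this (with /dev/sr0 and image.iso as placeholder names):

blocksize=$(isoinfo -d -i /dev/sr0 | grep "^Logical block size is:" | cut -d " " -f 5)
blockcount=$(isoinfo -d -i /dev/sr0 | grep "^Volume size is:" | cut -d " " -f 4)
dd if=/dev/sr0 bs=$blocksize count=$blockcount | md5sum   # checksum of exactly the ISO9660 area
md5sum image.iso                                          # should print the same hash if the burn is good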
For burning CDs I use the '-dao' (disc at once) switch and I get the same md5sums when checksumming directly.
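For reference, a minimal sketch of that (device and image names are placeholders):

cdrecord -dao dev=/dev/sr0 speed=4 image.iso   # disc-at-once, so no run-out padding is appended after the image
md5sum /dev/sr0 image.iso                      # per the post above, the two hashes should then agree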