diff --git a/README.md b/README.md
index 58eb6bf6e1a1a6a2702f11d80b66f0d702e475df..65ad425ddec99b6dd568ab3508a8cb3fef465ae8 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
-
 Perl script to sync individual projects to another Redmine instance.
 
-Status
+h2. Status
 
 My current goal is to synchronize one isolated project, which only
 has Wiki pages and attachments, from one Redmine instance to another.
@@ -10,25 +9,30 @@ This is more complicated than initially assumed, but still leaves out
 the rather hairy problem of migrating issues or even issue numbers and
 whatever embedded in Wiki text.
 
-Right now, synchronizing stuff that are related to users work.
-Wiki migration seems to work now too.
+* Right now, migrating stuff related to users works.
+* Wiki migration seems to work now too.
+* Migrating attachments (using rsync'ed backup files) now works too.
+* Migrating watchers works.
+
+The next step: Maybe I should reconsider the core synchronization code,
+but this will have to wait until the migration job at hand is completed.
 
-The next step: Wiki-Attachments
+h2. Notes
+
+h3. Perl
 
 Why Perl?  I'm not fluent enough in Ruby to even consider it as the
 tool of choice for this problem.  The script directly talks with
 the MySQL databases of the Redmine instances, it basically ignores
 the API.
 
-
-NOTES
+h3. To be improved
 
 The project's entry in 'wikis' whould be added to syncs by hand
 since Redmine creates the Wiki but the script currently doesn't
 check for that, it only looks at the syncs table.
 
-TODOS
+Also, the pre-configured trackers and roles need to be considered.
 
-* watchers: point to various content types, so only stuff
-  that is actually handled should be migrated here
+h2. TODOs
 
diff --git a/lib/Redmine/DB/CTX.pm b/lib/Redmine/DB/CTX.pm
index f364c6eddd19621d0c014ccec9e8c1711fceafe6..be6ee89f030597a8926c445d209ed691b736f0ff 100644
--- a/lib/Redmine/DB/CTX.pm
+++ b/lib/Redmine/DB/CTX.pm
@@ -36,8 +36,8 @@ sub sync_project
   my $sp_id= shift;
   my $dp_id= shift;
 
-  # $ctx->sync_project_members ($sp_id, $dp_id);
-  # $ctx->sync_project_user_preferences ($sp_id, $dp_id);
+  $ctx->sync_project_members ($sp_id, $dp_id);
+  $ctx->sync_project_user_preferences ($sp_id, $dp_id);
   $ctx->sync_wiki ($sp_id, $dp_id);
 }
 
@@ -233,7 +233,7 @@ would not really be an issue.
     my $s_user=      $s_users->{$s_user_id};
 # next unless ($s_user->{'type'} eq 'Group');
 
-    print "s_member: ", Dumper ($s_member);
+    # print "s_member: ", Dumper ($s_member);
     my $d_user_id= $ctx->sync_user ($s_user_id, $s_user);
 
     my ($d_member_id, $d_status, $d_sync_date)= $ctx->translate ('members', $s_member_id);
@@ -370,7 +370,7 @@ sub sync_user
     $s_user= $res->{$s_user_id};
   }
 
-    print "s_user: ", Dumper ($s_user);
+    # print "s_user: ", Dumper ($s_user);
 
     my ($d_user_id, $d_status, $d_sync_date)= $ctx->translate ('users', $s_user_id);
     print "s_user_id=[$s_user_id] d_user_id=[$d_user_id] d_status=[$d_status] d_sync_date=[$d_sync_date]\n";
@@ -506,8 +506,44 @@ sub sync_wiki
   $ctx->sync_generic_table ($s_pcx, 'wiki_redirects', [ [ 'wiki_id' => 'wikis' ] ]);
   $ctx->sync_generic_table ($s_pcx, 'wiki_contents',  [ [ 'page_id' => 'wiki_pages' ], ['author_id' => 'users' ] ]);
   $ctx->sync_generic_table ($s_pcx, 'wiki_content_versions',  [ [ 'wiki_content_id' => 'wiki_contents'], [ 'page_id' => 'wiki_pages' ], ['author_id' => 'users' ] ]);
+
+  print '='x72, "\n";
+  # print "attachments: ", Dumper ($s_pcx->{'wiki_attachments'});
+
+  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
+  # attachments
+  my $added= $ctx->sync_generic_table ($s_pcx, 'wiki_attachments',  [ [ 'container_id' => 'wiki_pages'], [ 'author_id' => 'users' ] ]);
+  # print "added: ", Dumper ($added);
+
+  if (defined ($ctx->{'copy_attachment'}))
+  {
+    my $cpa= $ctx->{'copy_attachment'};
+    foreach my $item (@$added)
+    {
+      &$cpa ($ctx, @$item);
+    }
+  }
+
+  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
+  # watchers
+  print "wiki_watchers: ", Dumper ($s_pcx->{'wiki_watchers'});
+  print "wiki_page_watchers: ", Dumper ($s_pcx->{'wiki_page_watchers'});
+  $ctx->sync_generic_table ($s_pcx, 'wiki_watchers',       [ [ 'watchable_id' => 'wikis'     ], [ 'user_id' => 'users' ] ]);
+  $ctx->sync_generic_table ($s_pcx, 'wiki_page_watchers',  [ [ 'watchable_id' => 'wiki_pages'], [ 'user_id' => 'users' ] ]);
 }
 
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+# virtual tables: process attachments separately for each container type,
+# so syncing the wiki will sync the wiki's attachments and ignore those
+# of the issues etc.
+
+my %TLT_table_name=
+(
+  'wiki_attachments'   => 'attachments',
+  'wiki_watchers'      => 'watchers',
+  'wiki_page_watchers' => 'watchers',
+);
+
 sub sync_generic_table
 {
   my $ctx= shift;
@@ -519,23 +555,27 @@ sub sync_generic_table
   print "sync_generic_table: table_name=[$table_name]\n";
   my $table= $s_pcx->{$table_name};
   # print "table [$table_name] ", Dumper ($table); exit;
+  my $tlt_table_name= $TLT_table_name{$table_name} || $table_name;
 
+  my @added= ();
   my $cnt= $ctx->stats($table_name);
   my @s_ids= sort { $a <=> $b} keys %$table; # maybe sorting helps to bring order into an hierarchy
   print "s_ids: ", join (',', @s_ids), "\n";
   ITEM: while (my $s_id= shift (@s_ids))
   {
-    my $d_id= $ctx->translate ($table_name, $s_id);
+    my $d_id= $ctx->translate ($tlt_table_name, $s_id);
     print "d_id=[$d_id]\n";
     $cnt->{'processed'}++;
 
     if (defined ($d_id))
-    {
+    { # TODO: the object is already on the destination, maybe we should
+      # fetch it and see if any records need to be updated.
       $cnt->{'unchanged'}++;
     }
     else
     {
-      my %data= %{$table->{$s_id}};
+      my $orig= $table->{$s_id};
+      my %data= %$orig;
       delete ($data{'id'});
 
       # translate attributes (an) pointing to table (tn); $tlt is a list of pairs
@@ -548,7 +588,7 @@ sub sync_generic_table
 
         unless (defined ($d_av))
         {
-          if ($tn eq $table_name)
+          if ($tn eq $tlt_table_name)
           { # this is a self referential table, put the (yet unresolved) to the head of the queue
             # TODO: this could lead to an endless loop!
             unshift (@s_ids, $s_av);
@@ -563,16 +603,21 @@ sub sync_generic_table
         $data{$an}= $d_av;
       }
 
-      $d_id= $ctx->{'dst'}->insert ($table_name, \%data);
-      $ctx->store_translation($table_name, $s_id, $d_id);
+      $d_id= $ctx->{'dst'}->insert ($tlt_table_name, \%data);
+      $ctx->store_translation($tlt_table_name, $s_id, $d_id);
+      $data{'id'}= $d_id; # now we know the record's id, so we can as well save it
+
       $cnt->{'added'}++;
+      push (@added, [ $orig, \%data ]);
     }
   }
 
-  $cnt;
+  \@added;
 }
 
-=head1 INTERNAL METHODS?
+=head1 INTERNAL METHODS
+
+The following methods are intended for internal use only.
 
 =cut
 
diff --git a/lib/Redmine/DB/MySQL.pm b/lib/Redmine/DB/MySQL.pm
index b5078549a26b769a2ddf801ea3f0d28b6c06088b..cd49e17c20576fd86545dcf2e60b7397d6ac6ee8 100644
--- a/lib/Redmine/DB/MySQL.pm
+++ b/lib/Redmine/DB/MySQL.pm
@@ -40,6 +40,18 @@ sub table
   $t;
 }
 
+=head2 $con->get_all_x ($table_name, $query_ref)
+
+Query_ref is an array reference whose first element gives the WHERE clause (without the string "WHERE").
+The query should not contain untrusted values; these should be indicated by placeholders (a "?" for each
+value).  The values make up the rest of the array reference.
+
+Side effect: caches values in $con->{$table_name};
+
+Returns all retrieved records.
+
+=cut
+
 sub get_all_x
 {
   my $self= shift;
@@ -74,14 +86,16 @@ sub get_all_x
   $sth->execute(@v);
 
   my $t= $self->table($table);
+  my $tt= {};
 
   while (defined (my $x= $sth->fetchrow_hashref()))
   {
     print "x: ", Dumper ($x) if ($show_fetched);
-    $t->{$x->{'id'}}= $x;
+    my $i= $x->{'id'};
+    $t->{$i}= $tt->{$i}= $x;
   }
 
-  $t;
+  $tt;
 }
 
 sub insert
@@ -121,6 +135,33 @@ sub insert
   $id;
 }
 
+sub update
+{
+  my $self= shift;
+  my $table= shift;   # name of the table to update
+  my $id= shift;      # value of the id column identifying the record
+  my $updates= shift; # hash ref: column name => new value
+
+  my $dbh= $self->connect();
+  return undef unless (defined ($dbh));
+
+  my (@vars, @vals);
+  foreach my $an (keys %$updates)
+  {
+    push (@vars, $an);
+    push (@vals, $updates->{$an});
+  }
+  push (@vals, $id);
+
+  my $ssu= "UPDATE `$table` SET ". join (',', map { $_.'=?' } @vars) . ' WHERE id=?'; # SET assignments are comma-separated
+  print "ssu=[$ssu]\n";
+  print "vals: ", join (',', @vals), "\n";
+  my $sth= $dbh->prepare($ssu);
+  $sth->execute(@vals);
+  print "ERROR: ", $sth->errstr(), "\n" if ($sth->err); # check the statement handle after execute
+  $sth->finish();
+}
+
 sub mysql
 {
   my $self= shift;
@@ -158,7 +199,7 @@ sub get_users
 
   # print "missing users: [", join (' ', @missing_users), "]\n";
   my $in= $an . ' IN ('. join(',', map { '?' } @_) . ')';
-  $show_query= $show_fetched= 1;
+  # $show_query= $show_fetched= 1;
   $self->get_all_x ('users', [ $in, @_ ]),
 }
 
@@ -211,7 +252,7 @@ sub pcx_members
 
 =head2 $con->pcx_wiki ($project_id)
 
-retrieve data related to the Wiki
+Retrieve data related to the Wiki associated with $project_id.
 
 Right now, we assume we can handle the amount of data returned, see
 notes in the code.
@@ -240,7 +281,7 @@ sub pcx_wiki
       print Dumper ($wikis);
     }
 
-    foreach my $wiki_id (@wiki_ids)
+    PROJECT_WIKI: foreach my $wiki_id (@wiki_ids)
     {
       my $wiki_pages= $self->get_all_x ('wiki_pages', [ 'wiki_id=?', $wiki_id ]);
       # $res->{'wiki_pages'}->{$wiki_id}= $wiki_pages; # one layer too many!
@@ -255,11 +296,26 @@ sub pcx_wiki
       # TODO: for now, assume we can handle the amount of data returned;
       # it might be necessary to introduce callbacks deal with the text
 
-      my $sel= 'page_id IN (SELECT id FROM wiki_pages WHERE wiki_id=?)';
-      my $wiki_contents=         $self->get_all_x ('wiki_contents',         [ $sel, $wiki_id ]);
-      my $wiki_content_versions= $self->get_all_x ('wiki_content_versions', [ $sel, $wiki_id ]);
+      my $sel_wiki_pages= '(SELECT id FROM wiki_pages WHERE wiki_id=?)';
+      my $wiki_contents=         $self->get_all_x ('wiki_contents',         [ 'page_id IN ' . $sel_wiki_pages, $wiki_id ]);
+      my $wiki_content_versions= $self->get_all_x ('wiki_content_versions', [ 'page_id IN ' . $sel_wiki_pages, $wiki_id ]);
       $res->{'wiki_contents'}=         $wiki_contents;
       $res->{'wiki_content_versions'}= $wiki_content_versions;
+
+      # attachments
+      my $sel2= 'container_id IN ' . $sel_wiki_pages . " AND container_type='WikiPage'";
+      my $wiki_attachments=       $self->get_all_x ('attachments', [ $sel2, $wiki_id ]);
+      $res->{'wiki_attachments'}= $wiki_attachments;
+
+      # watchers
+$show_query= 1;
+      my $wiki_watchers=       $self->get_all_x ('watchers', [ "watchable_type='Wiki' AND watchable_id=?", $wiki_id ]);
+      my $wiki_page_watchers=  $self->get_all_x ('watchers', [ "watchable_type='WikiPage' AND watchable_id IN ".$sel_wiki_pages, $wiki_id ]);
+
+      $res->{'wiki_watchers'}= $wiki_watchers;
+      $res->{'wiki_page_watchers'}= $wiki_page_watchers;
+
+      last PROJECT_WIKI; # TODO: hmm... I really should check if there could be more than one wiki per project
     }
   }
 
diff --git a/t_sync.pl b/t_sync.pl
index c9a049cef9cbea658751e64efdb39d1a05de60f0..658c3353b65925d907dae50b3fd52e957f896f48 100755
--- a/t_sync.pl
+++ b/t_sync.pl
@@ -57,11 +57,15 @@ my $setup=
   {
     'config' => '/home/gg/etc/src/database.yml',
     'db' => 'production',
+    'attachment_base' => '/home/backup/redmine-phaidra/files',
+    'attachment_with_directory' => 0, # Redmine version 1.x does not have that attribute
   },
   'dst' =>
   {
     'config' => '/home/gg/etc/dst/database.yml',
     'db' => 'production',
+    'attachment_base' => '/var/lib/redmine/default/files',
+    'attachment_with_directory' => 1, # Redmine version 2.x has that attribute
   },
   'sync_context_id' => 1,
   'syncs' => # not used, instead, this is written directly into the database
@@ -113,6 +117,39 @@ sub usage
   exit (0);
 }
 
+# callback function to actually copy the attachment files
+# NOTE/TODO: be sure about the permissions, this script needs to write into Redmine's files directory
+sub copy_attachment
+{
+  my $ctx=    shift;
+  my $orig=   shift;  # original record
+  my $synced= shift;  # synchronized record
+
+  my @s_fnm= $setup->{'src'}->{'attachment_base'}; # TODO/NOTE: hmm... the attachment_base doesn't make it into the context!
+  push (@s_fnm, $orig->{'disk_directory'}) if (exists ($orig->{'disk_directory'}) && defined ($orig->{'disk_directory'}));
+  push (@s_fnm, $orig->{'disk_filename'});
+  my $s_fnm= join ('/', @s_fnm);
+  # TODO: check if the file is there and stuff
+
+  my @d_fnm= $setup->{'dst'}->{'attachment_base'};
+
+  # if (exists ($synced->{'disk_directory'})) # ... && defined ($synced->{'disk_directory'}))
+  if ($setup->{'dst'}->{'attachment_with_directory'})
+  { # new Redmine version has a structured attachments directory
+    my $disk_dir= join ('/', 'sync', $setup->{'sync_context_id'});
+    push (@d_fnm, $disk_dir);
+    $ctx->{'dst'}->update ('attachments', $synced->{'id'}, { 'disk_directory' => $disk_dir });
+  }
+
+  my $d_fnm= join ('/', @d_fnm);
+  system ('mkdir', '-p', $d_fnm) unless (-d $d_fnm);
+  $d_fnm .= '/'.  $synced->{'disk_filename'};
+
+  print "copy attachment [$s_fnm] -> [$d_fnm]\n";
+
+  system ('cp', $s_fnm, $d_fnm);
+}
+
    if ($op_mode eq 'usage') { usage(); }
 elsif ($op_mode eq 'prep')
 {
@@ -179,7 +216,8 @@ elsif ($op_mode eq 'sync')
   my $dst= read_configs($setup, 'dst');
   # my $x_u= $src->get_all_users();
 
-  my $ctx= new Redmine::DB::CTX ('ctx_id' => $setup->{'sync_context_id'}, 'src' => $src, 'dst' => $dst);
+  my $ctx= new Redmine::DB::CTX ('ctx_id' => $setup->{'sync_context_id'}, 'src' => $src, 'dst' => $dst,
+     'copy_attachment' => \&copy_attachment);
 
   # print "setup: ", Dumper ($setup);