File: gcp

package info
gcp 0.2.1-1
  • links: PTS, VCS
  • area: main
  • in suites: bullseye, sid
  • size: 304 kB
  • sloc: python: 770; makefile: 23
file content (748 lines) | stat: -rwxr-xr-x 29,727 bytes
#!/usr/bin/env python3

"""
gcp: Gcp CoPier
Copyright (c) 2010, 2011  Jérôme Poisson <goffi@goffi.org>
          (c) 2011        Thomas Preud'homme <robotux@celest.fr>
          (c) 2016        Jingbei Li <i@jingbei.li>
          (c) 2018, 2019  Matteo Cypriani <mcy@lm7.fr>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""

import logging
from logging import debug, info, error, warning

import gettext

import sys
import os
import os.path
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import pickle

logging.basicConfig(level=logging.INFO, format='%(message)s')
gettext.install('gcp', "i18n")
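# Note: gettext.install() makes the _() translation function available as a
# builtin, which is why _() is used below without an explicit import.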

try:
    from gi.repository import GLib
    from dbus.mainloop.glib import DBusGMainLoop
    DBusGMainLoop(set_as_default=True)
    import dbus.service
    import dbus
except ImportError as e:
    error(_("Error during import"))
    error(_("Please check dependecies:"), e)
    exit(1)
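# Note on the imports above: DBusGMainLoop(set_as_default=True) is called
# before any bus connection is created, so that D-Bus messages are dispatched
# through the GLib main loop that GCP.go() runs later on.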

try:
    from progressbar import ProgressBar, Percentage, Bar, ETA, FileTransferSpeed
    pbar_available=True
except ImportError as e:
    info (_('ProgressBar not available, please install python-progressbar or download it at http://pypi.python.org/pypi/progressbar'))
    info (_('Progress bar deactivated\n--\n'))
    pbar_available=False

NAME = "gcp (Gcp CoPier)"
NAME_SHORT = "gcp"
VERSION = '0.2.1'

ABOUT = NAME_SHORT + " " + VERSION + """
---
""" + NAME + """
Copyright: 2010-2011 Jérôme Poisson <goffi@goffi.org>
           2011      Thomas Preud'homme <robotux@celest.fr>
           2016      Jingbei Li <i@jingbei.li>
           2018      Matteo Cypriani <mcy@lm7.fr>
This program comes with ABSOLUTELY NO WARRANTY; it is free software,
and you are welcome to redistribute it under certain conditions.
"""

const_DBUS_INTERFACE = "org.goffi.gcp"
const_DBUS_PATH = "/org/goffi/gcp"
const_BUFF_SIZE = 4096
const_PRESERVE = set(['mode','ownership','timestamps'])
const_PRESERVE_p = 'mode,ownership,timestamps'
const_FS_FIX = set(['auto','force','no'])
const_FILES_DIR = "~/.gcp"
const_JOURNAL_PATH = const_FILES_DIR + "/journal"
const_SAVED_LIST = const_FILES_DIR + "/saved_list"


class DbusObject(dbus.service.Object):

    def __init__(self, gcp, bus, path):
        self._gcp = gcp
        dbus.service.Object.__init__(self, bus, path)
        debug(_("Init DbusObject..."))
        self.cb={}

    @dbus.service.method(const_DBUS_INTERFACE,
                         in_signature='', out_signature='s')
    def getVersion(self):
        """Get gcp version
        @return: version as string"""
        return VERSION

    @dbus.service.method(const_DBUS_INTERFACE,
                         in_signature='ss', out_signature='bs')
    def addArgs(self, source_dir, args):
        """Add arguments to gcp as if there were entered on its own command line
        @param source_dir: current working dir to use as base for arguments, as given by os.getcwd()
        @param args: serialized (wich pickle) list of strings - without command name -, as given by sys.argv[1:].
        @return: success (boolean) and error message if any (string)"""
        try:
            args = pickle.loads(str(args))
        except (TypeError, pickle.UnpicklingError):
            return (False, _("INTERNAL ERROR: invalid arguments"))
        try:
            source_dir = pickle.loads(str(source_dir))
        except (TypeError, pickle.UnpicklingError):
            return (False, _("INTERNAL ERROR: invalid source_dir"))
        return self._gcp.parseArguments(args, source_dir)

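# Illustration only (not executed, hypothetical client code): another gcp
# process reaches this D-Bus service roughly the way GCP.parseArguments() does
# further below:
#
#   import dbus, os, pickle, sys
#   bus = dbus.SessionBus()
#   remote = dbus.Interface(bus.get_object(const_DBUS_INTERFACE, const_DBUS_PATH),
#                           dbus_interface=const_DBUS_INTERFACE)
#   remote.getVersion()
#   remote.addArgs(pickle.dumps(os.getcwd()), pickle.dumps(sys.argv[1:]))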

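# Journal format, as written by startFile() and closeFile() below: two lines
# per copied file, the source path followed by a status line ("OK", "PARTIAL"
# or "FAILED", with the comma-separated error names, if any).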
class Journal():

    def __init__(self, path=const_JOURNAL_PATH):
        self.journal_path = os.path.expanduser(path)
        self.journal_fd = open(self.journal_path,'w') #TODO: check and maybe save previous journals
        self.__entry_open = None
        self.failed = []
        self.partial = []

    def __del__(self):
        self.journal_fd.flush()
        self.journal_fd.close()

    def startFile(self, source_path):
        """Start an entry in the journal"""
        assert not self.__entry_open
        self.__entry_open = source_path
        self.journal_fd.write(source_path+"\n")
        self.journal_fd.flush()
        self.success=True
        self.errors=[]

    def closeFile(self):
        """Close the entry in the journal"""
        assert self.__entry_open
        if not self.success:
            status = "FAILED"
        else:
            status = "OK" if not self.errors else "PARTIAL"
        self.journal_fd.write("%(status)s: %(errors)s\n" % {'status': status, 'errors': ', '.join(self.errors)})
        self.journal_fd.flush()
        self.__entry_open = None

    def copyFailed(self):
        """Must be called when something is wrong with the copy itself"""
        assert self.__entry_open
        self.success = False
        self.failed.append(self.__entry_open)

    def error(self, name):
        """Something went wrong"""
        assert self.__entry_open
        self.errors.append(name)
        self.partial.append(self.__entry_open)

    def showErrors(self):
        """Show which files were not successfully copied"""
        failed = set(self.failed)
        partial = set(self.partial)
        for entry in failed:
            partial.discard(entry)

        if failed:
            error(_("/!\\ THE FOLLOWING FILES WERE *NOT* SUCCESSFULY COPIED:"))
            #TODO: use logging capability to print all error message in red
            for entry in failed:
                info("\t- %s" % entry)
            info ('--\n')
        if partial:
            warning(_("The following files were copied, but some errors happened:"))
            for entry in partial:
                info("\t- %s" % entry)
            info ('--\n')

        if failed or partial:
            info(_("Please check journal: %s") % self.journal_path)


class GCP():

    def __init__(self):
        files_dir = os.path.expanduser(const_FILES_DIR)
        if not os.path.exists(files_dir):
            os.makedirs(files_dir)
        try:
            sessions_bus = dbus.SessionBus()
            db_object = sessions_bus.get_object(const_DBUS_INTERFACE,
                                const_DBUS_PATH)
            self.gcp_main = dbus.Interface(db_object,
                                dbus_interface=const_DBUS_INTERFACE)
            self._main_instance = False

        except dbus.exceptions.DBusException as e:
            if e._dbus_error_name=='org.freedesktop.DBus.Error.ServiceUnknown':
                self.launchDbusMainInstance()
                debug (_("gcp launched"))
                self._main_instance = True
                self.buffer_size = const_BUFF_SIZE
                self.__launched = False #True when journal is initialised and copy is started
            else:
                raise

    def launchDbusMainInstance(self):
        debug (_("Init DBus..."))
        session_bus = dbus.SessionBus()
        self.dbus_name = dbus.service.BusName(const_DBUS_INTERFACE, session_bus)
        self.dbus_object = DbusObject(self, session_bus, const_DBUS_PATH)

        self.copy_list = []
        self.mounts = self.__getMountPoints()
        self.bytes_total = 0
        self.bytes_copied = 0

    def getFsType(self, path):
        fs = ''
        last_mount_point = ''
        for mount in self.mounts:
            if path.startswith(mount) and len(mount)>=len(last_mount_point):
                fs = self.mounts[mount]
                last_mount_point = mount
        return fs

    def __getMountPoints(self):
        """Parse /proc/mounts to get currently mounted devices"""
        # TODO: reparse when a new device is added/a device is removed
        #       (check freedesktop mounting signals)
        ret =  {}
        try:
            with open("/proc/mounts",'r') as mounts:
                for line in mounts.readlines():
                    fs_spec, fs_file, fs_vfstype, \
                            fs_mntops, fs_freq, fs_passno = line.split(' ')
                    ret[fs_file] = fs_vfstype
        except:
            error (_("Can't read mounts table"))
        return ret
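    # Example (hypothetical device): the /proc/mounts line
    #   /dev/sdb1 /media/usb vfat rw,relatime 0 0
    # would yield ret["/media/usb"] = "vfat"; getFsType() then picks the
    # longest matching mount point prefix for a given path.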

    def __appendToList(self, path, dest_path, options):
        """Add a file to the copy list
        @param path: absolute path of the file
        @param dest_path: absolute path of the destination directory
        @param options: options as returned by the argument parser"""
        debug(_("Adding to copy list: %(path)s ==> %(dest_path)s (%(fs_type)s)")
              % {"path":path, "dest_path":dest_path,
                 "fs_type":self.getFsType(dest_path)})
        try:
            self.bytes_total+=os.path.getsize(path)
            self.copy_list.insert(0,(path, dest_path, options))
        except OSError as e:
            error(_("Can't copy %(path)s: %(exception)s")
                  % {'path':path, 'exception':e.strerror})

    def __appendDirToList(self, dirpath, dest_path, options):
        """Add recursively directory to the copy list
        @param path: absolute path of dir
        @param options: options as return by optparse"""
        #We first check that the dest path exists, and create it if needed
        dest_path = self.__fix_filenames(dest_path, options, no_journal=True)
        if not os.path.exists(dest_path):
            debug ("Creating directory %s" % dest_path)
            os.makedirs(dest_path) #TODO: check permissions
        #TODO: check that dest_path is an accessible dir,
        #      and skip file/write error in log if needed
        try:
            for filename in os.listdir(dirpath):
                filepath = os.path.join(dirpath,filename)
                if os.path.islink(filepath) and not options.dereference:
                    debug ("Skippink symbolic dir: %s" % filepath)
                    continue
                if os.path.isdir(filepath):
                    full_dest_path = os.path.join(dest_path,filename)
                    self.__appendDirToList(filepath, full_dest_path, options)
                else:
                    self.__appendToList(filepath, dest_path, options)
        except OSError as e:
            try:
                error(_("Can't append %(path)s to copy list: %(exception)s") % {'path':filepath, 'exception':e.strerror})
            except NameError:
                #We can't list the dir
                error(_("Can't access %(dirpath)s: %(exception)s") % {'dirpath':dirpath, 'exception':e.strerror})

    def __checkArgs(self, options, source_dir, args):
        """Check thats args are files, and add them to copy list
        @param options: options sets
        @param source_dir: directory where the command was entered
        @parm args: args of the copy"""
        assert(len (args)>=2)
        len_args = len(args)
        try:
            dest_path = os.path.normpath(os.path.join(source_dir, args.pop()))
        except OSError as e:
            error (_("Invalid dest_path: %s"),e)

        for path in args:
            abspath = os.path.normpath(os.path.join(os.path.expanduser(source_dir), path))
            if not os.path.exists(abspath):
                warning(_("The path given in arg doesn't exist or is not accessible: %s") % abspath)
            else:
                if os.path.isdir(abspath):
                    if not options.recursive:
                        warning (_('omitting directory "%s"') % abspath)
                    else:
                        _basename=os.path.basename(os.path.normpath(path))
                        full_dest_path = dest_path if options.directdir else os.path.normpath(os.path.join(dest_path, _basename))
                        self.__appendDirToList(abspath, full_dest_path, options)
                else:
                    self.__appendToList(abspath, dest_path, options)

    def __copyNextFile(self):
        """Takes the last file in the list and launches the copy using glib
           io_watch event.
           @return: True if a file was added, False otherwise."""
        if not self.copy_list:
            # Nothing left to copy, we quit
            if self.progress:
                self.__pbar_finish()
            self.journal.showErrors()
            self.loop.quit()
            return False

        source_file, dest_path, options = self.copy_list.pop()
        self.journal.startFile(source_file)
        try:
            source_fd = open(source_file, 'rb')
        except:
            self.journal.copyFailed()
            self.journal.error("can't open source")
            self.journal.closeFile()
            return True

        filename = os.path.basename(source_file)
        assert(filename)
        if options.dest_file:
            dest_file = self.__fix_filenames(options.dest_file, options)
        else:
            dest_file = self.__fix_filenames(os.path.join(dest_path, filename),
                                             options)
        if os.path.exists(dest_file) and not options.force:
            warning (_("File [%s] already exists, skipping it!") % dest_file)
            self.journal.copyFailed()
            self.journal.error("already exists")
            self.journal.closeFile()
            source_fd.close()
            return True

        try:
            dest_fd = open(dest_file, 'wb')
        except:
            self.journal.copyFailed()
            self.journal.error("can't open dest")
            self.journal.closeFile()
            source_fd.close()
            return True

        GLib.io_add_watch(source_fd, GLib.IO_IN,self._copyFile,
                          (dest_fd, options),
                          priority=GLib.PRIORITY_DEFAULT)
        if not self.progress:
            info(_("COPYING %(source)s ==> %(dest)s")
                 % {"source":source_file, "dest":dest_file})
        return True

    def __copyFailed(self, reason, source_fd, dest_fd):
        """Write the failure in the journal and close files descriptors"""
        self.journal.copyFailed()
        self.journal.error(reason)
        self.journal.closeFile()
        source_fd.close()
        dest_fd.close()

    def _copyFile(self, source_fd, condition, data):
        """Actually copy the file, callback used with io_add_watch
        @param source_fd: file descriptor of the file to copy
        @param condition: condition which launched the callback (glib.IO_IN)
        @param data: tuple with (destination file descriptor, copying options)"""
        try:
            dest_fd,options = data

            try:
                buff = source_fd.read(self.buffer_size)
            except KeyboardInterrupt:
                raise KeyboardInterrupt
            except:
                self.__copyFailed("can't read source", source_fd, dest_fd)
                return False

            try:
                dest_fd.write(buff)
            except KeyboardInterrupt:
                raise KeyboardInterrupt
            except:
                self.__copyFailed("can't write to dest", source_fd, dest_fd)
                return False

            self.bytes_copied += len(buff)
            if self.progress:
                self._pbar_update()

            if len(buff) != self.buffer_size:
                source_fd.close()
                dest_fd.close()
                self.__post_copy(source_fd.name, dest_fd.name, options)
                self.journal.closeFile()
                return False
            return True
        except KeyboardInterrupt:
            self._userInterruption()
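    # Note on the copy loop: GLib calls _copyFile() whenever source_fd is
    # readable, which for a regular file is essentially always. Returning True
    # keeps the watch alive for the next chunk; returning False removes it,
    # the main loop becomes idle again, and the idle handler registered in
    # parseArguments() (__copyNextFile) picks up the next file in the list.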

    def __fix_filenames(self, filename, options, no_journal=False):
        """Fix filenames incompatibilities/mistake according to options
        @param filename: full path to the file
        @param options: options as parsed on command line
        @param no_journal: don't write any entry in journal
        @return: fixed filename"""
        fixed_filename = filename

        if options.fix_filenames == 'force' or (options.fix_filenames == 'auto' and self.getFsType(filename) == 'vfat'):
            fixed_filename = filename.replace('\\','_')\
                               .replace(':',';')\
                               .replace('*','+')\
                               .replace('?','_')\
                               .replace('"','\'')\
                               .replace('<','[')\
                               .replace('>',']')\
                               .replace('|','!')\
                               .rstrip() #XXX: trailing spaces cause issues on FAT (must check the FAT documentation for why)

        if not fixed_filename:
            fixed_filename = '_'
        if fixed_filename != filename and not no_journal:
            self.journal.error('filename fixed')
        return fixed_filename
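    # Example (hypothetical name): with --fix-filenames=force, or with the
    # default 'auto' on a vfat destination, a file named 'a:b?*<c>.txt '
    # would be renamed 'a;b_+[c].txt'.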

    def __post_copy(self, source_file, dest_file, options):
        """Do post copy traitement (mainly managing --preserve option)"""
        st_file = os.stat(source_file)
        for preserve in options.preserve:
            try:
                if preserve == 'mode':
                    os.chmod(dest_file, st_file.st_mode)
                elif preserve == 'ownership':
                    os.chown(dest_file, st_file.st_uid, st_file.st_gid)
                elif preserve == 'timestamps':
                    os.utime(dest_file, (st_file.st_atime, st_file.st_mtime))
            except OSError as e:
                self.journal.error("preserve-"+preserve)

    def __get_string_size(self, size):
        """Return a nice string representation of a size"""
        if size >= 2**50:
            return _("%.2f PiB") % (float(size) / 2**50)
        if size >= 2**40:
            return _("%.2f TiB") % (float(size) / 2**40)
        if size >= 2**30:
            return _("%.2f GiB") % (float(size) / 2**30)
        if size >= 2**20:
            return _("%.2f MiB") % (float(size) / 2**20)
        if size >= 2**10:
            return _("%.2f KiB") % (float(size) / 2**10)
        return _("%i B") % size

    def _pbar_update(self):
        """Update progress bar position, create the bar if it doesn't exist"""
        assert(self.progress)
        try:
            if self.pbar.maxval != self.bytes_total:
                self.pbar.maxval = self.bytes_total
                pbar_msg = _("Copying %s") % self.__get_string_size(self.bytes_total)
                self.pbar.widgets[0] = pbar_msg
        except AttributeError:
            if not self.bytes_total:
                # No progress bar if the files have a null size
                return
            pbar_msg = _("Copying %s") % self.__get_string_size(self.bytes_total)
            self.pbar = ProgressBar(self.bytes_total,
                                    [pbar_msg, " ", Percentage(), " ", Bar(),
                                     " ", FileTransferSpeed(), " ", ETA()])
            self.pbar.start()
        self.pbar.update(self.bytes_copied)

    def __pbar_finish(self):
        """Mark the progression as finished"""
        assert(self.progress)
        try:
            self.pbar.finish()
        except AttributeError:
            pass

    def __sourcesSaving(self,options,args):
        """Manage saving/loading/deleting etc of sources files
        @param options: options as parsed from command line
        @param args: args parsed from command line"""
        if options.sources_save or options.sources_load\
           or options.sources_list or options.sources_full_list\
           or options.sources_del or options.sources_replace:
            try:
                with open(os.path.expanduser(const_SAVED_LIST),'rb') as saved_fd:
                    saved_files = pickle.load(saved_fd)
            except:
                saved_files={}

        if options.sources_del:
            if options.sources_del not in saved_files:
                error(_("No saved sources with this name, check existing names with --sources-list"))
            else:
                del saved_files[options.sources_del]
                with open(os.path.expanduser(const_SAVED_LIST),'wb') as saved_fd:
                    pickle.dump(saved_files,saved_fd)
            if not args:
                exit(0)

        if options.sources_list or options.sources_full_list:
            info(_('Saved sources:'))
            sources = list(saved_files.keys())
            sources.sort()
            for source in sources:
                info("\t[%s]" % source)
                if options.sources_full_list:
                    for filename in saved_files[source]:
                        info("\t\t%s" % filename)
            info("---\n")
            if not args:
                exit(0)

        if options.sources_save or options.sources_replace:
            sources_name = options.sources_save or options.sources_replace
            if sources_name in saved_files and not options.sources_replace:
                error(_("There is already a saved sources list with this name, skipping --sources-save"))
            else:
                if len(args)>1:
                    saved_files[sources_name] = list(map(os.path.abspath,args[:-1]))
                    with open(os.path.expanduser(const_SAVED_LIST),'wb') as saved_fd:
                        pickle.dump(saved_files,saved_fd)

        if options.sources_load:
            if options.sources_load not in saved_files:
                error(_("No saved sources with this name, check existing names with --sources-list"))
            else:
                saved_args = saved_files[options.sources_load]
                saved_args.reverse()
                for arg in saved_args:
                    args.insert(0,arg)

    def parseArguments(self, full_args=sys.argv[1:], source_dir = os.getcwd()):
        """Parse arguments and add files to queue
        @param full_args: list of arguments strings (without program name)
        @param source_dir: path from where the arguments come, as given by os.getcwd()
        @return: a tuple (boolean, message) where the boolean is the success of the arguments
                 validation, and message is the error message to print when necessary"""
        _usage="""
        %(prog)s [options] FILE DEST
        %(prog)s [options] FILE1 [FILE2 ...] DEST-DIR
        """
        for idx in range(len(full_args)):
            full_args[idx] = full_args[idx].encode('utf-8')

        parser = ArgumentParser(usage=_usage,
                                formatter_class=RawDescriptionHelpFormatter)

        parser.add_argument("-V", "--version",
            action="version", version=ABOUT
        )

        group_cplike = parser.add_argument_group("cp-like options")
        group_cplike.add_argument("-f", "--force",
            action="store_true", default=False,
            help=_("force overwriting of existing files")
        )
        group_cplike.add_argument("-L", "--dereference",
            action="store_true", default=False,
            help=_("always follow symbolic links in sources")
        )
        group_cplike.add_argument("-P", "--no-dereference",
            action="store_false", dest='dereference',
            help=_("never follow symbolic links in sources")
        )
        group_cplike.add_argument("-p",
            action="store_true", default=False,
            help=_("same as --preserve=%s" % const_PRESERVE_p)
        )
        group_cplike.add_argument("--preserve",
            action="store", default='',
            help=_("preserve specified attributes; accepted values: \
                   'all', or one or more amongst %s") % str(const_PRESERVE)
        )
        group_cplike.add_argument("-r", "-R", "--recursive",
            action="store_true", default=False,
            help=_("copy directories recursively")
        )
        group_cplike.add_argument("-v", "--verbose",
            action="store_true", default=False,
            help=_("display what is being done")
        )

        group_gcpspecific = parser.add_argument_group("gcp-specific options")
        #parser.add_argument("--no-unicode-fix",
        #    action="store_false", dest='unicode_fix', default=True,
        #    help=_("don't fix name encoding errors") #TODO
        #)
        group_gcpspecific.add_argument("--fix-filenames",
            choices = const_FS_FIX, dest='fix_filenames', default='auto',
            help=_("fix file names incompatible with the destination \
                   file system (default: auto)")
        )
        group_gcpspecific.add_argument("--no-fs-fix",
            action="store_true", dest='no_fs_fix', default=False,
            help=_("[DEPRECATED] same as --fix-filename=no (overrides \
                   --fix-filenames)")
        )
        group_gcpspecific.add_argument("--no-progress",
            action="store_false", dest="progress", default=True,
            help=_("disable progress bar")
        )

        group_saving = parser.add_argument_group("sources saving")
        group_saving.add_argument("--sources-save",
            action="store",
            help=_("save sources arguments")
        )
        group_saving.add_argument("--sources-replace",
            action="store",
            help=_("save sources arguments and replace memory if it already exists")
        )
        group_saving.add_argument("--sources-load",
            action="store",
            help=_("load sources arguments")
        )
        group_saving.add_argument("--sources-del",
            action="store",
            help=_("delete saved sources list")
        )
        group_saving.add_argument("--sources-list",
            action="store_true", default=False,
            help=_("list names of saved sources")
        )
        group_saving.add_argument("--sources-full-list",
            action="store_true", default=False,
            help=_("list names of saved sources and files in it")
        )

        (options, args) = parser.parse_known_args()

        # True only in the special case where we are copying a dir and it
        # doesn't exist:
        options.directdir = False

        # options check
        if options.progress and not pbar_available:
            warning (_("Progress bar is not available, deactivating"))
            options.progress = self.progress = False
        else:
            self.progress = options.progress

        if options.verbose:
            logging.getLogger().setLevel(logging.DEBUG)

        if options.no_fs_fix:
            options.fix_filenames = 'no'

        preserve = set()

        if options.p:
            preserve.update(const_PRESERVE_p.split(','))

        if options.preserve:
            preserve.update(options.preserve.split(','))
            preserve_all = False
            for value in preserve:
                if value == 'all':
                    preserve_all = True
                    continue
                if value not in const_PRESERVE:
                    error (_("Invalid --preserve value '%s'") % value)
                    exit(1)
            if preserve_all:
                preserve.remove('all')
                preserve.update(const_PRESERVE)

        options.preserve = preserve

        self.__sourcesSaving(options, args)

        if len(args) == 2: #we check special cases
            src_path = os.path.abspath(os.path.expanduser(args[0]))
            dest_path = os.path.abspath(os.path.expanduser(args[1]))
            if os.path.isdir(src_path):
                options.dest_file = None #we are copying a dir, this option is for files only
                if not os.path.exists(dest_path):
                    options.directdir = True #dest_dir doesn't exist, it's the directdir special case
            elif not os.path.exists(dest_path) or os.path.isfile(dest_path):
                options.dest_file = dest_path
                args[1] = os.path.dirname(dest_path)
            else:
                options.dest_file = None
        else:
            options.dest_file = None

        #if there is another instance of gcp, we send the options to it
        if not self._main_instance:
            info (_("There is already one instance of %s running, pluging to it") % NAME_SHORT)
            #XXX: we have to serialize the data, as D-Bus only accepts valid unicode
            #     while filenames can contain invalid unicode.
            return self.gcp_main.addArgs(pickle.dumps(os.getcwd()),pickle.dumps(full_args))
        else:
            if len(args) < 2:
                _error_msg = _("Wrong number of arguments")
                return (False, _error_msg)
            debug(_("adding args to gcp: %s") % args)
            self.__checkArgs(options, source_dir, args)
            if not self.__launched:
                self.journal = Journal()
                GLib.idle_add(self.__copyNextFile)
                self.__launched = True
        return (True,'')

    def _userInterruption(self):
        info(_("User interruption: good bye"))
        exit(1)

    def go(self):
        """Launch main loop"""
        self.loop = GLib.MainLoop()
        try:
            self.loop.run()
        except KeyboardInterrupt:
            self._userInterruption()


if __name__ == "__main__":
    gcp = GCP()
    success,message = gcp.parseArguments()
    if not success:
        error(message)
        exit(1)
    if gcp._main_instance:
        gcp.go()
        if gcp.journal.failed:
            exit(1)
        if gcp.journal.partial:
            exit(2)