Dulwich.io dulwich / e82e290
New upstream version 0.18.0 Jelmer Vernooij 1 year, 11 months ago
79 changed file(s) with 3526 addition(s) and 1660 deletion(s). Raw diff Collapse all Expand all
2222 env: TEST_REQUIRE=fastimport
2323
2424 install:
25 - travis_retry pip install -U pip coverage codecov $TEST_REQUIRE
25 - travis_retry pip install -U pip coverage codecov flake8 $TEST_REQUIRE
2626
2727 script:
2828 # Test without c extensions
3232 - python setup.py build_ext -i
3333 - python -m coverage run -p --source=dulwich -m unittest dulwich.tests.test_suite
3434
35 # Style
36 - make style
37
3538 after_success:
3639 - python -m coverage combine
3740 - codecov
0 Jelmer Vernooij <jelmer@jelmer.uk>
0 Jelmer Vernooij <jelmer@jelmer.uk>
1 Dave Borowitz <dborowitz@google.com>
2 John Carr <john.carr@unrouted.co.uk>
3 Gary van der Merwe <garyvdm@gmail.com>
4 milki <milki@rescomp.berkeley.edu>
5 Augie Fackler <durin42@gmail.com>
6 Tay Ray Chuan <rctay89@gmail.com>
7 Risto Kankkunen <risto.kankkunen@iki.fi>
8 Jonas Haag <jonas@lophus.org>
9 Fabien Boucher <fabien.boucher@enovance.com>
110 James Westby <jw+debian@jameswestby.net>
2 John Carr <john.carr@unrouted.co.uk>
3 Dave Borowitz <dborowitz@google.com>
11 Mike Edgar <adgar@google.com>
12 Koen Martens <gmc@sonologic.nl>
13 Abderrahim Kitouni <a.kitouni@gmail.com>
14 William Grant <william.grant@canonical.com>
15 Marcin Kuzminski <marcin@python-works.com>
16 Ryan Faulkner <rfaulk@yahoo-inc.com>
17 Julian Berman <Julian@GrayVines.com>
18 Mark Mikofski <mark.mikofski@sunpowercorp.com>
19 Michael K <michael-k@users.noreply.github.com>
20 Ali Sabil <ali.sabil@gmail.com>
21 Damien Tournoud <damien@commerceguys.com>
22 Hannu Valtonen <hannu.valtonen@ohmu.fi>
23 Mika Mäenpää <mika.j.maenpaa@iki.fi>
24 Paul Hummer <paul@eventuallyanyway.com>
25 Lele Gaifax <lele@metapensiero.it>
26 Lukasz Balcerzak <lukasz.balcerzak@python-center.org>
27 Tommy Yu <tommy.yu@auckland.ac.nz>
28 anatoly techtonik <techtonik@gmail.com>
29 bmcorser <bmcorser@gmail.com>
30 Brendan Cully <brendan@kublai.com>
31 Chow Loong Jin <hyperair@debian.org>
432 Chris Eberle <eberle1080@gmail.com>
5 "milki" <milki@rescomp.berkeley.edu>
6 Gary van der Merwe <garyvdm@gmail.com>
33 Dmitriy <dkomarov@gmail.com>
34 Hervé Cauwelier <herve@oursours.net>
35 Hugo Osvaldo Barrera <hugo@barrera.io>
36 Jameson Nash <jameson@mit.edu>
37 Marc Brinkmann <git@marcbrinkmann.de>
38 Nicolas Dandrimont <nicolas@dandrimont.eu>
39 Robert Brown <robert.brown@gmail.com>
40 Siddharth Agarwal <sid0@fb.com>
41 Stefan Zimmermann <zimmermann.code@gmail.com>
42 Takeshi Kanemoto <tak.kanemoto@gmail.com>
43 Yifan Zhang <yifan@wavii.com>
44 Aaron O'Mullan <aaron.omullan@friendco.de>
45 Adam "Cezar" Jenkins <emperorcezar@gmail.com>
46 Alberto Ruiz <aruiz@gnome.org>
47 Alexander Belchenko <bialix@ukr.net>
48 Andreas Kloeckner <inform@tiker.net>
49 André Roth <neolynx@gmail.com>
50 Benjamin Pollack <benjamin@bitquabit.com>
51 Benoit HERVIER <khertan@khertan.net>
52 Dan Callaghan <dcallagh@redhat.com>
53 David Keijser <david.keijser@klarna.com>
54 David Ostrovsky <david@ostrovsky.org>
55 David Pursehouse <david.pursehouse@gmail.com>
56 Dmitrij D. Czarkoff <czarkoff@gmail.com>
57 Doug Hellmann <doug@doughellmann.com>
58 Dov Feldstern <dovdevel@gmail.com>
59 Félix Mattrat <felix@dysosmus.net>
60 Hwee Miin Koh <hwee-miin.koh@ubisoft.com>
61 Jason R. Coombs <jaraco@jaraco.com>
62 Jeremy Whitlock <jcscoobyrs@gmail.com>
63 John Arbash Meinel <john@arbash-meinel.com>
64 Laurent Rineau <laurent.rineau@cgal.org>
65 Martin Packman <gzlist@googlemail.com>
66 Max Shawabkeh <max99x@gmail.com>
67 Michael Hudson <michael.hudson@canonical.com>
68 Nick Stenning <nick@whiteink.com>
69 Nick Ward <ward.nickjames@gmail.com>
70 Paul Chen <lancevdance@gmail.com>
71 Roland Mas <lolando@debian.org>
72 Ronald Blaschke <ron@rblasch.org>
73 Ronny Pfannschmidt <Ronny.Pfannschmidt@gmx.de>
74 Ross Light <ross@zombiezen.com>
75 Ryan McKern <ryan@orangefort.com>
76 Ted Horst <ted.horst@earthlink.net>
77 Thomas Liebetraut <thomas@tommie-lie.de>
78 Timo Schmid <info@bluec0re.eu>
79 Víðir Valberg Guðmundsson <vidir.valberg@orn.li>
80 dak180 <dak180@users.sourceforge.net>
81 Akbar Gumbira <akbargumbira@gmail.com>
82 Alex Holmes <alex.holmes@isotoma.com>
83 Andi McClure <andi.m.mcclure@gmail.com>
84 Andres Lowrie <andres.lowrie@gmail.com>
85 Artem Tikhomirov <artem.tikhomirov@syntevo.com>
86 Brian Visel <eode@eptitude.net>
87 Bruce Duncan <Bruce.Duncan@ed.ac.uk>
88 Bruno Renié <brutasse@gmail.com>
89 Chaiwat Suttipongsakul <cwt@bashell.com>
90 Chris Bunney <crbunney@users.noreply.github.com>
91 Chris Reid <chris@reidsy.com>
92 Daniele Sluijters <daniele.sluijters@gmail.com>
93 David Bennett <davbennett@google.com>
94 David Blewett <davidb@sixfeetup.com>
95 David Carr <david@carrclan.us>
96 Dirk <dirk@opani.com>
97 Elan Ruusamäe <glen@delfi.ee>
98 Forrest Hopkins <fingerheroes@gmail.com>
99 Hal Wine <hal.wine@gmail.com>
100 Hans Kolek <hkolek@gmail.com>
101 Jakub Wilk <jwilk@jwilk.net>
102 JonChu <jchonphoenix@gmail.com>
103 Kostis Anagnostopoulos <ankostis@gmail.com>
104 Kyle Kelly <kkelly@yelp.com>
105 Lionel Flandrin <lionel@svkt.org>
106 Max Bowsher <_@maxb.eu>
107 Mike Williams <miwilliams@google.com>
108 Mikhail Terekhov <terekhov@emc.com>
109 Nix <nix@esperi.co.uk>
110 OnMaster <wme@CONTACT.DE>
111 Pascal Quantin <pascal.quantin@gmail.com>
112 Ricardo Salveti <ricardo.salveti@openbossa.org>
113 Rod Cloutier <rodcloutier@gmail.com>
114 Sam Vilain <svilain@saymedia.com>
115 Stefano Rivera <stefano@rivera.za.net>
116 Steven Myint <git@stevenmyint.com>
117 Søren Løvborg <sorenl@unity3d.com>
118 Travis Cline <travis.cline@gmail.com>
119 Victor Stinner <vstinner@redhat.com>
120 Volodymyr Holovko <vholovko@gmail.com>
121 Yuval Langer <yuval.langer@gmail.com>
122 codingtony <tony.bussieres@gmail.com>
123 jon bain <jsbain@yahoo.com>
124 kwatters <kwatters@tagged.com>
125 max <max0d41@github.com>
126 Segev Finer <segev208@gmail.com>
127 fviolette <fviolette@talend.com>
128 dzhuang <dzhuang.scut@gmail.com>
129 Antoine Pietri <antoine.pietri1@gmail.com>
7130
8 Hervé Cauwelier <herve@itaapy.com> wrote the original tutorial.
9
10 See the revision history for a full list of contributors.
131 If you contributed but are missing from this list, please send me an e-mail.
00 PYTHON = python
11 PYFLAKES = pyflakes
22 PEP8 = pep8
3 FLAKE8 ?= flake8
34 SETUP = $(PYTHON) setup.py
45 PYDOCTOR ?= pydoctor
56 TESTRUNNER ?= unittest
5657 pep8:
5758 $(PEP8) dulwich
5859
60 style:
61 $(FLAKE8) --exclude=build,.git,build-pypy,.tox
62
5963 before-push: check
6064 git diff origin/master | $(PEP8) --diff
6165
+214
-161
NEWS less more
0 0.18.0 2017-07-31
1
2 BUG FIXES
3
4 * Fix remaining tests on Windows. (Jelmer Vernooij, #493)
5
6 * Fix build of C extensions with Python 3 on Windows.
7 (Jelmer Vernooij)
8
9 * Pass 'mkdir' argument onto Repo.init_bare in Repo.clone.
10 (Jelmer Vernooij, #504)
11
12 * In ``dulwich.porcelain.add``, if no files are specified,
13 add from current working directory rather than repository root.
14 (Jelmer Vernooij, #521)
15
16 * Properly deal with submodules in 'porcelain.status'.
17 (Jelmer Vernooij, #517)
18
19 * ``dulwich.porcelain.remove`` now actually removes files from
20 disk, not just from the index. (Jelmer Vernooij, #488)
21
22 * Fix handling of "reset" command with markers and without
23 "from". (Antoine Pietri)
24
25 * Fix handling of "merge" command with markers. (Antoine Pietri)
26
27 * Support treeish argument to porcelain.reset(), rather than
28 requiring a ref/commit id. (Jelmer Vernooij)
29
30 * Handle race condition when mtime doesn't change between writes/reads.
31 (Jelmer Vernooij, #541)
32
33 * Fix ``dulwich.porcelain.show`` on commits with Python 3.
34 (Jelmer Vernooij, #532)
35
36 IMPROVEMENTS
37
38 * Add basic support for reading ignore files in ``dulwich.ignore``.
39 ``dulwich.porcelain.add`` and ``dulwich.porcelain.status`` now honor
40 ignores. (Jelmer Vernooij, Segev Finer, #524, #526)
41
42 * New ``dulwich.porcelain.check_ignore`` command.
43 (Jelmer Vernooij)
44
45 * ``dulwich.porcelain.status`` now supports a ``ignored`` argument.
46 (Jelmer Vernooij)
47
48 DOCUMENTATION
49
50 * Clarified docstrings for Client.{send_pack,fetch_pack} implementations.
51 (Jelmer Vernooij, #523)
52
053 0.17.3 2017-03-20
154
255 PLATFORM SUPPORT
1164
1265 BUG FIXES
1366
14 * Add workaround for
15 https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work,
16 fixing Dulwich when used with C extensions on pypy < 5.6. (Victor Stinner)
17
18 * Properly quote config values with a '#' character in them.
19 (Jelmer Vernooij, #511)
67 * Add workaround for
68 https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work,
69 fixing Dulwich when used with C extensions on pypy < 5.6. (Victor Stinner)
70
71 * Properly quote config values with a '#' character in them.
72 (Jelmer Vernooij, #511)
2073
2174 0.17.1 2017-03-01
2275
2376 IMPROVEMENTS
2477
25 * Add basic 'dulwich pull' command. (Jelmer Vernooij)
26
27 BUG FIXES
28
29 * Cope with existing submodules during pull.
30 (Jelmer Vernooij, #505)
78 * Add basic 'dulwich pull' command. (Jelmer Vernooij)
79
80 BUG FIXES
81
82 * Cope with existing submodules during pull.
83 (Jelmer Vernooij, #505)
3184
3285 0.17.0 2017-03-01
3386
3487 TEST FIXES
3588
36 * Skip test that requires sync to synchronize filesystems if os.sync is
37 not available. (Koen Martens)
38
39 IMPROVEMENTS
40
41 * Implement MemoryRepo.{set_description,get_description}.
42 (Jelmer Vernooij)
43
44 * Raise exception in Repo.stage() when absolute paths are
45 passed in. Allow passing in relative paths to
46 porcelain.add().(Jelmer Vernooij)
47
48 BUG FIXES
49
50 * Handle multi-line quoted values in config files.
51 (Jelmer Vernooij, #495)
52
53 * Allow porcelain.clone of repository without HEAD.
54 (Jelmer Vernooij, #501)
55
56 * Support passing tag ids to Walker()'s include argument.
57 (Jelmer Vernooij)
58
59 * Don't strip trailing newlines from extra headers.
60 (Nicolas Dandrimont)
61
62 * Set bufsize=0 for subprocess interaction with SSH client.
63 Fixes hangs on Python 3. (René Stern, #434)
64
65 * Don't drop first slash for SSH paths, except for those
66 starting with "~". (Jelmer Vernooij, René Stern, #463)
67
68 * Properly log off after retrieving just refs.
69 (Jelmer Vernooij)
89 * Skip test that requires sync to synchronize filesystems if os.sync is
90 not available. (Koen Martens)
91
92 IMPROVEMENTS
93
94 * Implement MemoryRepo.{set_description,get_description}.
95 (Jelmer Vernooij)
96
97 * Raise exception in Repo.stage() when absolute paths are
98 passed in. Allow passing in relative paths to
99 porcelain.add().(Jelmer Vernooij)
100
101 BUG FIXES
102
103 * Handle multi-line quoted values in config files.
104 (Jelmer Vernooij, #495)
105
106 * Allow porcelain.clone of repository without HEAD.
107 (Jelmer Vernooij, #501)
108
109 * Support passing tag ids to Walker()'s include argument.
110 (Jelmer Vernooij)
111
112 * Don't strip trailing newlines from extra headers.
113 (Nicolas Dandrimont)
114
115 * Set bufsize=0 for subprocess interaction with SSH client.
116 Fixes hangs on Python 3. (René Stern, #434)
117
118 * Don't drop first slash for SSH paths, except for those
119 starting with "~". (Jelmer Vernooij, René Stern, #463)
120
121 * Properly log off after retrieving just refs.
122 (Jelmer Vernooij)
70123
71124 0.16.3 2016-01-14
72125
504557
505558 IMPROVEMENTS
506559
507 * Add support for recursive add in 'git add'.
508 (Ryan Faulkner, Jelmer Vernooij)
509
510 * Add porcelain 'list_tags'. (Ryan Faulkner)
511
512 * Add porcelain 'push'. (Ryan Faulkner)
513
514 * Add porcelain 'pull'. (Ryan Faulkner)
515
516 * Support 'http.proxy' in HttpGitClient.
517 (Jelmer Vernooij, #1096030)
518
519 * Support 'http.useragent' in HttpGitClient.
560 * Add support for recursive add in 'git add'.
561 (Ryan Faulkner, Jelmer Vernooij)
562
563 * Add porcelain 'list_tags'. (Ryan Faulkner)
564
565 * Add porcelain 'push'. (Ryan Faulkner)
566
567 * Add porcelain 'pull'. (Ryan Faulkner)
568
569 * Support 'http.proxy' in HttpGitClient.
570 (Jelmer Vernooij, #1096030)
571
572 * Support 'http.useragent' in HttpGitClient.
573 (Jelmer Vernooij)
574
575 * In server, wait for clients to send empty list of
576 wants when talking to empty repository.
577 (Damien Tournoud)
578
579 * Various changes to improve compatibility with
580 Python 3. (Gary van der Merwe)
581
582 BUG FIXES
583
584 * Support unseekable 'wsgi.input' streams.
585 (Jonas Haag)
586
587 * Raise TypeError when passing unicode() object
588 to Repo.__getitem__.
589 (Jonas Haag)
590
591 * Fix handling of `reset` command in dulwich.fastexport.
592 (Jelmer Vernooij, #1249029)
593
594 * In client, don't wait for server to close connection
595 first. Fixes hang when used against GitHub
596 server implementation. (Siddharth Agarwal)
597
598 * DeltaChainIterator: fix a corner case where an object is inflated as an
599 object already in the repository.
600 (Damien Tournoud, #135)
601
602 * Stop leaking file handles during pack reload. (Damien Tournoud)
603
604 * Avoid reopening packs during pack cache reload. (Jelmer Vernooij)
605
606 API CHANGES
607
608 * Drop support for Python 2.6. (Jelmer Vernooij)
609
610 0.9.5 2014-02-23
611
612 IMPROVEMENTS
613
614 * Add porcelain 'tag'. (Ryan Faulkner)
615
616 * New module `dulwich.objectspec` for parsing strings referencing
617 objects and commit ranges. (Jelmer Vernooij)
618
619 * Add shallow branch support. (milki)
620
621 * Allow passing urllib2 `opener` into HttpGitClient.
622 (Dov Feldstern, #909037)
623
624 CHANGES
625
626 * Drop support for Python 2.4 and 2.5. (Jelmer Vernooij)
627
628 API CHANGES
629
630 * Remove long deprecated ``Repo.commit``, ``Repo.get_blob``,
631 ``Repo.tree`` and ``Repo.tag``. (Jelmer Vernooij)
632
633 * Remove long deprecated ``Repo.revision_history`` and ``Repo.ref``.
634 (Jelmer Vernooij)
635
636 * Remove long deprecated ``Tree.entries``. (Jelmer Vernooij)
637
638 BUG FIXES
639
640 * Raise KeyError rather than TypeError when passing in
641 unicode object of length 20 or 40 to Repo.__getitem__.
642 (Jelmer Vernooij)
643
644 * Use 'rm' rather than 'unlink' in tests, since the latter
645 does not exist on OpenBSD and other platforms.
646 (Dmitrij D. Czarkoff)
647
648 0.9.4 2013-11-30
649
650 IMPROVEMENTS
651
652 * Add ssh_kwargs attribute to ParamikoSSHVendor. (milki)
653
654 * Add Repo.set_description(). (Víðir Valberg Guðmundsson)
655
656 * Add a basic `dulwich.porcelain` module. (Jelmer Vernooij, Marcin Kuzminski)
657
658 * Various performance improvements for object access.
520659 (Jelmer Vernooij)
521660
522 * In server, wait for clients to send empty list of
523 wants when talking to empty repository.
524 (Damien Tournoud)
525
526 * Various changes to improve compatibility with
527 Python 3. (Gary van der Merwe)
528
529 BUG FIXES
530
531 * Support unseekable 'wsgi.input' streams.
532 (Jonas Haag)
533
534 * Raise TypeError when passing unicode() object
535 to Repo.__getitem__.
536 (Jonas Haag)
537
538 * Fix handling of `reset` command in dulwich.fastexport.
539 (Jelmer Vernooij, #1249029)
540
541 * In client, don't wait for server to close connection
542 first. Fixes hang when used against GitHub
543 server implementation. (Siddharth Agarwal)
544
545 * DeltaChainIterator: fix a corner case where an object is inflated as an
546 object already in the repository.
547 (Damien Tournoud, #135)
548
549 * Stop leaking file handles during pack reload. (Damien Tournoud)
550
551 * Avoid reopening packs during pack cache reload. (Jelmer Vernooij)
552
553 API CHANGES
554
555 * Drop support for Python 2.6. (Jelmer Vernooij)
556
557 0.9.5 2014-02-23
558
559 IMPROVEMENTS
560
561 * Add porcelain 'tag'. (Ryan Faulkner)
562
563 * New module `dulwich.objectspec` for parsing strings referencing
564 objects and commit ranges. (Jelmer Vernooij)
565
566 * Add shallow branch support. (milki)
567
568 * Allow passing urllib2 `opener` into HttpGitClient.
569 (Dov Feldstern, #909037)
570
571 CHANGES
572
573 * Drop support for Python 2.4 and 2.5. (Jelmer Vernooij)
574
575 API CHANGES
576
577 * Remove long deprecated ``Repo.commit``, ``Repo.get_blob``,
578 ``Repo.tree`` and ``Repo.tag``. (Jelmer Vernooij)
579
580 * Remove long deprecated ``Repo.revision_history`` and ``Repo.ref``.
581 (Jelmer Vernooij)
582
583 * Remove long deprecated ``Tree.entries``. (Jelmer Vernooij)
584
585 BUG FIXES
586
587 * Raise KeyError rather than TypeError when passing in
588 unicode object of length 20 or 40 to Repo.__getitem__.
589 (Jelmer Vernooij)
590
591 * Use 'rm' rather than 'unlink' in tests, since the latter
592 does not exist on OpenBSD and other platforms.
593 (Dmitrij D. Czarkoff)
594
595 0.9.4 2013-11-30
596
597 IMPROVEMENTS
598
599 * Add ssh_kwargs attribute to ParamikoSSHVendor. (milki)
600
601 * Add Repo.set_description(). (Víðir Valberg Guðmundsson)
602
603 * Add a basic `dulwich.porcelain` module. (Jelmer Vernooij, Marcin Kuzminski)
604
605 * Various performance improvements for object access.
606 (Jelmer Vernooij)
607
608 * New function `get_transport_and_path_from_url`,
609 similar to `get_transport_and_path` but only
610 supports URLs.
611 (Jelmer Vernooij)
612
613 * Add support for file:// URLs in `get_transport_and_path_from_url`.
614 (Jelmer Vernooij)
615
616 * Add LocalGitClient implementation.
617 (Jelmer Vernooij)
661 * New function `get_transport_and_path_from_url`,
662 similar to `get_transport_and_path` but only
663 supports URLs.
664 (Jelmer Vernooij)
665
666 * Add support for file:// URLs in `get_transport_and_path_from_url`.
667 (Jelmer Vernooij)
668
669 * Add LocalGitClient implementation.
670 (Jelmer Vernooij)
618671
619672 BUG FIXES
620673
918971 * Smart protocol clients can now change refs even if they are
919972 not uploading new data. (Jelmer Vernooij, #855993)
920973
921 * Don't compile C extensions when running in pypy.
922 (Ronny Pfannschmidt, #881546)
923
924 * Use different name for strnlen replacement function to avoid clashing
925 with system strnlen. (Jelmer Vernooij, #880362)
974 * Don't compile C extensions when running in pypy.
975 (Ronny Pfannschmidt, #881546)
976
977 * Use different name for strnlen replacement function to avoid clashing
978 with system strnlen. (Jelmer Vernooij, #880362)
926979
927980 API CHANGES
928981
15201573
15211574 0.1.0 2009-01-24
15221575
1523 * Initial release.
1576 * Initial release.
00 Metadata-Version: 1.1
11 Name: dulwich
2 Version: 0.17.3
2 Version: 0.18.0
33 Summary: Python Git Library
44 Home-page: https://www.dulwich.io/
5 Author: Jelmer Vernooij
5 Author: UNKNOWN
66 Author-email: jelmer@jelmer.uk
77 License: Apachev2 or later or GPLv2
88 Description:
1212 All functionality is available in pure Python. Optional
1313 C extensions can be built for improved performance.
1414
15 The project is named after the part of London that Mr. and Mrs. Git live in
16 in the particular Monty Python sketch.
15 The project is named after the part of London that Mr. and Mrs. Git live
16 in in the particular Monty Python sketch.
1717
1818 Keywords: git
1919 Platform: UNKNOWN
2727 Classifier: Programming Language :: Python :: Implementation :: CPython
2828 Classifier: Programming Language :: Python :: Implementation :: PyPy
2929 Classifier: Operating System :: POSIX
30 Classifier: Operating System :: Microsoft :: Windows
3031 Classifier: Topic :: Software Development :: Version Control
00 [![Build Status](https://travis-ci.org/jelmer/dulwich.png?branch=master)](https://travis-ci.org/jelmer/dulwich)
1 [![Windows Build status](https://ci.appveyor.com/api/projects/status/cnothr6pxprfx2lf/branch/master?svg=true)](https://ci.appveyor.com/project/jelmer/dulwich-njb6g/branch/master)
12
23 This is the Dulwich project.
34
2627
2728 $ pip install dulwich --global-option="--pure"
2829
30 Getting started
31 ---------------
32
33 Dulwich comes with both a lower-level API and higher-level plumbing ("porcelain").
34
35 For example, to use the lower level API to access the commit message of the
36 last commit:
37
38 >>> from dulwich.repo import Repo
39 >>> r = Repo('.')
40 >>> r.head()
41 '57fbe010446356833a6ad1600059d80b1e731e15'
42 >>> c = r[r.head()]
43 >>> c
44 <Commit 015fc1267258458901a94d228e39f0a378370466>
45 >>> c.message
46 'Add note about encoding.\n'
47
48 And to print it using porcelain:
49
50 >>> from dulwich import porcelain
51 >>> porcelain.log('.', max_entries=1)
52 --------------------------------------------------
53 commit: 57fbe010446356833a6ad1600059d80b1e731e15
54 Author: Jelmer Vernooij <jelmer@jelmer.uk>
55 Date: Sat Apr 29 2017 23:57:34 +0000
56
57 Add note about encoding.
58
2959 Further documentation
3060 ---------------------
3161
4373 and [dulwich-discuss](https://groups.google.com/forum/#!forum/dulwich-discuss)
4474 mailing lists.
4575
76 Contributing
77 ------------
78
79 For a full list of contributors, see the git logs or [AUTHORS](AUTHORS).
80
81 If you'd like to contribute to Dulwich, see the [CONTRIBUTING](CONTRIBUTING.md)
82 file and [list of open issues](https://github.com/jelmer/dulwich/issues).
83
4684 Supported versions of Python
4785 ----------------------------
4886
00 environment:
1
12 matrix:
3
24 - PYTHON: "C:\\Python27"
3 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win32-py2.7.exe"
5 PYTHON_VERSION: "2.7.x"
6 PYTHON_ARCH: "32"
7
8 - PYTHON: "C:\\Python27-x64"
9 PYTHON_VERSION: "2.7.x"
10 PYTHON_ARCH: "64"
11
12 - PYTHON: "C:\\Python33"
13 PYTHON_VERSION: "3.3.x"
14 PYTHON_ARCH: "32"
15
16 - PYTHON: "C:\\Python33-x64"
17 PYTHON_VERSION: "3.3.x"
18 PYTHON_ARCH: "64"
19 DISTUTILS_USE_SDK: "1"
420
521 - PYTHON: "C:\\Python34"
6 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win32-py3.4.exe"
22 PYTHON_VERSION: "3.4.x"
23 PYTHON_ARCH: "32"
24
25 - PYTHON: "C:\\Python34-x64"
26 PYTHON_VERSION: "3.4.x"
27 PYTHON_ARCH: "64"
28 DISTUTILS_USE_SDK: "1"
729
830 - PYTHON: "C:\\Python35"
9 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win32-py3.5.exe"
10
11 - PYTHON: "C:\\Python27-x64"
12 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win-amd64-py2.7.exe"
13
14 - PYTHON: "C:\\Python34-x64"
15 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win-amd64-py3.4.exe"
31 PYTHON_VERSION: "3.5.x"
32 PYTHON_ARCH: "32"
1633
1734 - PYTHON: "C:\\Python35-x64"
18 PYWIN32_URL: "https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win-amd64-py3.5.exe"
35 PYTHON_VERSION: "3.5.x"
36 PYTHON_ARCH: "64"
37
38 - PYTHON: "C:\\Python36"
39 PYTHON_VERSION: "3.6.x"
40 PYTHON_ARCH: "32"
41
42 - PYTHON: "C:\\Python36-x64"
43 PYTHON_VERSION: "3.6.x"
44 PYTHON_ARCH: "64"
1945
2046 install:
21 - ps: (new-object net.webclient).DownloadFile($env:PYWIN32_URL, 'c:\\pywin32.exe')
22 - "%PYTHON%/Scripts/easy_install.exe c:\\pywin32.exe"
23 - "%PYTHON%/Scripts/easy_install.exe wheel"
47 # If there is a newer build queued for the same PR, cancel this one.
48 # The AppVeyor 'rollout builds' option is supposed to serve the same
49 # purpose but it is problematic because it tends to cancel builds pushed
50 # directly to master instead of just PR builds (or the converse).
51 # credits: JuliaLang developers.
52 - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
53 https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
54 Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
55 throw "There are newer queued builds for this pull request, failing early." }
56 - ECHO "Filesystem root:"
57 - ps: "ls \"C:/\""
2458
25 build: off
59 - ECHO "Installed SDKs:"
60 - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""
61
62 # Install Python (from the official .msi of http://python.org) and pip when
63 # not already installed.
64 - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 }
65
66 # Prepend newly installed Python to the PATH of this build (this cannot be
67 # done from inside the powershell script as it would require to restart
68 # the parent CMD process).
69 - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
70
71 # Check that we have the expected version and architecture for Python
72 - "build.cmd %PYTHON%\\python.exe --version"
73 - "build.cmd %PYTHON%\\python.exe -c \"import struct; print(struct.calcsize('P') * 8)\""
74
75 # Install setuptools/wheel so that we can e.g. use bdist_wheel
76 - "pip install setuptools wheel"
77
78 - "build.cmd %PYTHON%\\python.exe setup.py develop"
79
80 build_script:
81 # Build the compiled extension
82 - "build.cmd %PYTHON%\\python.exe setup.py build"
2683
2784 test_script:
28 - "%WITH_COMPILER% %PYTHON%/python setup.py test"
85 - "build.cmd %PYTHON%\\python.exe setup.py test"
2986
3087 after_test:
31 - "%WITH_COMPILER% %PYTHON%/python setup.py bdist_wheel"
88 - "build.cmd %PYTHON%\\python.exe setup.py bdist_wheel"
89 # http://stackoverflow.com/questions/43255455/unicode-character-causing-error-with-bdist-wininst-on-python-3-but-not-python-2
90 # - "python setup.py bdist_wininst"
91 - "build.cmd %PYTHON%\\python.exe setup.py bdist_msi"
92 - ps: "ls dist"
3293
3394 artifacts:
3495 - path: dist\*
506506 return cmd_kls(args[1:])
507507
508508
509 class cmd_check_ignore(Command):
510
511 def run(self, args):
512 parser = optparse.OptionParser()
513 options, args = parser.parse_args(args)
514 ret = 1
515 for path in porcelain.check_ignore('.', args):
516 print(path)
517 ret = 0
518 return ret
519
520
509521 class cmd_help(Command):
510522
511523 def run(self, args):
531543 commands = {
532544 "add": cmd_add,
533545 "archive": cmd_archive,
546 "check-ignore": cmd_check_ignore,
534547 "clone": cmd_clone,
535548 "commit": cmd_commit,
536549 "commit-tree": cmd_commit_tree,
22 # dulwich documentation build configuration file, created by
33 # sphinx-quickstart on Thu Feb 18 23:18:28 2010.
44 #
5 # This file is execfile()d with the current directory set to its containing dir.
5 # This file is execfile()d with the current directory set to its containing
6 # dir.
67 #
78 # Note that not all possible configuration values are present in this
89 # autogenerated file.
1011 # All configuration values have a default; values that are commented out
1112 # serve to show the default.
1213
13 import sys, os
14 import os
15 import sys
1416
1517 # If extensions (or modules to document with autodoc) are in another directory,
1618 # add these directories to sys.path here. If the directory is relative to the
1921 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__))))
2022 dulwich = __import__('dulwich')
2123
22 # -- General configuration -----------------------------------------------------
23
24 # Add any Sphinx extension module names here, as strings. They can be extensions
25 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
24 # -- General configuration ----------------------------------------------------
25
26 # Add any Sphinx extension module names here, as strings. They can be
27 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
2628 extensions = ['sphinx.ext.autodoc']
2729 try:
2830 import rst2pdf
4143 source_suffix = '.txt'
4244
4345 # The encoding of source files.
44 #source_encoding = 'utf-8'
46 # source_encoding = 'utf-8'
4547
4648 # The master toctree document.
4749 master_doc = 'index'
6163
6264 # The language for content autogenerated by Sphinx. Refer to documentation
6365 # for a list of supported languages.
64 #language = None
66 # language = None
6567
6668 # There are two options for replacing |today|: either, you set today to some
6769 # non-false value, then it is used:
68 #today = ''
70 # today = ''
6971 # Else, today_fmt is used as the format for a strftime call.
70 #today_fmt = '%B %d, %Y'
72 # today_fmt = '%B %d, %Y'
7173
7274 # List of documents that shouldn't be included in the build.
73 #unused_docs = []
75 # unused_docs = []
7476
7577 # List of directories, relative to source directory, that shouldn't be searched
7678 # for source files.
7779 exclude_trees = ['build']
7880
79 # The reST default role (used for this markup: `text`) to use for all documents.
80 #default_role = None
81 # The reST default role (used for this markup: `text`) to use for all
82 # documents.
83 # default_role = None
8184
8285 # If true, '()' will be appended to :func: etc. cross-reference text.
83 #add_function_parentheses = True
86 # add_function_parentheses = True
8487
8588 # If true, the current module name will be prepended to all description
8689 # unit titles (such as .. function::).
87 #add_module_names = True
90 # add_module_names = True
8891
8992 # If true, sectionauthor and moduleauthor directives will be shown in the
9093 # output. They are ignored by default.
91 #show_authors = False
94 # show_authors = False
9295
9396 # The name of the Pygments (syntax highlighting) style to use.
9497 pygments_style = 'sphinx'
9598
9699 # A list of ignored prefixes for module index sorting.
97 #modindex_common_prefix = []
98
99
100 # -- Options for HTML output ---------------------------------------------------
100 # modindex_common_prefix = []
101
102
103 # -- Options for HTML output -------------------------------------------------
101104
102105 # The theme to use for HTML and HTML Help pages. Major themes that come with
103106 # Sphinx are currently 'default' and 'sphinxdoc'.
104 #html_theme = 'default'
107 # html_theme = 'default'
105108 html_theme = 'nature'
106109
107110 # Theme options are theme-specific and customize the look and feel of a theme
108111 # further. For a list of options available for each theme, see the
109112 # documentation.
110 #html_theme_options = {}
113 # html_theme_options = {}
111114
112115 # Add any paths that contain custom themes here, relative to this directory.
113116 html_theme_path = ['theme']
114117
115118 # The name for this set of Sphinx documents. If None, it defaults to
116119 # "<project> v<release> documentation".
117 #html_title = None
120 # html_title = None
118121
119122 # A shorter title for the navigation bar. Default is the same as html_title.
120 #html_short_title = None
123 # html_short_title = None
121124
122125 # The name of an image file (relative to this directory) to place at the top
123126 # of the sidebar.
124 #html_logo = None
127 # html_logo = None
125128
126129 # The name of an image file (within the static path) to use as favicon of the
127130 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
128131 # pixels large.
129 #html_favicon = None
132 # html_favicon = None
130133
131134 # Add any paths that contain custom static files (such as style sheets) here,
132135 # relative to this directory. They are copied after the builtin static files,
135138
136139 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
137140 # using the given strftime format.
138 #html_last_updated_fmt = '%b %d, %Y'
141 # html_last_updated_fmt = '%b %d, %Y'
139142
140143 # If true, SmartyPants will be used to convert quotes and dashes to
141144 # typographically correct entities.
142 #html_use_smartypants = True
145 # html_use_smartypants = True
143146
144147 # Custom sidebar templates, maps document names to template names.
145 #html_sidebars = {}
148 # html_sidebars = {}
146149
147150 # Additional templates that should be rendered to pages, maps page names to
148151 # template names.
149 #html_additional_pages = {}
152 # html_additional_pages = {}
150153
151154 # If false, no module index is generated.
152 #html_use_modindex = True
155 # html_use_modindex = True
153156
154157 # If false, no index is generated.
155 #html_use_index = True
158 # html_use_index = True
156159
157160 # If true, the index is split into individual pages for each letter.
158 #html_split_index = False
161 # html_split_index = False
159162
160163 # If true, links to the reST sources are added to the pages.
161 #html_show_sourcelink = True
164 # html_show_sourcelink = True
162165
163166 # If true, an OpenSearch description file will be output, and all pages will
164167 # contain a <link> tag referring to it. The value of this option must be the
165168 # base URL from which the finished HTML is served.
166 #html_use_opensearch = ''
169 # html_use_opensearch = ''
167170
168171 # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
169 #html_file_suffix = ''
172 # html_file_suffix = ''
170173
171174 # Output file base name for HTML help builder.
172175 htmlhelp_basename = 'dulwichdoc'
173176
174177
175 # -- Options for LaTeX output --------------------------------------------------
178 # -- Options for LaTeX output ------------------------------------------------
176179
177180 # The paper size ('letter' or 'a4').
178 #latex_paper_size = 'letter'
181 # latex_paper_size = 'letter'
179182
180183 # The font size ('10pt', '11pt' or '12pt').
181 #latex_font_size = '10pt'
184 # latex_font_size = '10pt'
182185
183186 # Grouping the document tree into LaTeX files. List of tuples
184 # (source start file, target name, title, author, documentclass [howto/manual]).
187 # (source start file, target name, title, author, documentclass
188 # [howto/manual]).
185189 latex_documents = [
186190 ('index', 'dulwich.tex', u'dulwich Documentation',
187191 u'Jelmer Vernooij', 'manual'),
189193
190194 # The name of an image file (relative to this directory) to place at the top of
191195 # the title page.
192 #latex_logo = None
196 # latex_logo = None
193197
194198 # For "manual" documents, if this is true, then toplevel headings are parts,
195199 # not chapters.
196 #latex_use_parts = False
200 # latex_use_parts = False
197201
198202 # Additional stuff for the LaTeX preamble.
199 #latex_preamble = ''
203 # latex_preamble = ''
200204
201205 # Documents to append as an appendix to all manuals.
202 #latex_appendices = []
206 # latex_appendices = []
203207
204208 # If false, no module index is generated.
205 #latex_use_modindex = True
209 # latex_use_modindex = True
206210
207211 pdf_documents = [
208212 ('index', u'dulwich', u'Documentation for dulwich',
209213 u'Jelmer Vernooij'),
210214 ]
211 pdf_stylesheets = ['sphinx','kerning','a4']
215 pdf_stylesheets = ['sphinx', 'kerning', 'a4']
212216 pdf_break_level = 2
213217 pdf_inline_footnotes = True
214
3232 [parent <parent sha> if several parents from merges]
3333 author <author name> <author e-mail> <timestamp> <timezone>
3434 committer <author name> <author e-mail> <timestamp> <timezone>
35
35
3636 <commit message>
3737
3838 But where are the changes you committed? The commit contains a reference to a
8989 per object) or in a ``pack`` file, which is a container for a number of these
9090 objects.
9191
92 The is also an index of the current state of the working copy in the
92 There is also an index of the current state of the working copy in the
9393 repository as well as files to track the existing branches and tags.
9494
9595 For a more detailed explanation of object formats and SHA-1 digests, see:
3131 >>> open("testrepo/testfile", "w").write("data")
3232 >>> porcelain.add(r, "testfile")
3333 >>> porcelain.commit(r, b"A sample commit")
34
35 Push changes
36 ------------
37
38 >>> tr = porcelain.init("targetrepo")
39 >>> r = porcelain.push("testrepo", "targetrepo", "master")
2727 SSH (git+ssh://) or tunneled over HTTP (http://).
2828
2929 Dulwich provides support for accessing remote repositories in
30 ``dulwich.client``. To create a new client, you can either construct
30 ``dulwich.client``. To create a new client, you can construct
3131 one manually::
3232
3333 >>> from dulwich.client import TCPGitClient
2121
2222 """Python implementation of the Git file formats and protocols."""
2323
24 __version__ = (0, 17, 3)
24 __version__ = (0, 18, 0)
2424 typedef unsigned short mode_t;
2525 #endif
2626
27 #if (PY_VERSION_HEX < 0x02050000)
28 typedef int Py_ssize_t;
29 #endif
30
31 #if (PY_VERSION_HEX < 0x02060000)
32 #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
27 #if PY_MAJOR_VERSION < 3
28 typedef long Py_hash_t;
3329 #endif
3430
3531 #if PY_MAJOR_VERSION >= 3
299295 return result;
300296 }
301297
302 static int add_hash(PyObject *get, PyObject *set, char *str, int n)
298 static Py_hash_t add_hash(PyObject *get, PyObject *set, char *str, int n)
303299 {
304300 PyObject *str_obj = NULL, *hash_obj = NULL, *value = NULL,
305301 *set_value = NULL;
306 long hash;
302 Py_hash_t hash;
307303
308304 /* It would be nice to hash without copying str into a PyString, but that
309305 * isn't exposed by the API. */
2121 #include <stdlib.h>
2222 #include <sys/stat.h>
2323
24 #if (PY_VERSION_HEX < 0x02050000)
25 typedef int Py_ssize_t;
26 #endif
27
2824 #if PY_MAJOR_VERSION >= 3
2925 #define PyInt_Check(obj) 0
3026 #define PyInt_CheckExact(obj) 0
6460 static PyObject *py_parse_tree(PyObject *self, PyObject *args, PyObject *kw)
6561 {
6662 char *text, *start, *end;
67 int len, namelen, strict;
63 int len, strict;
64 size_t namelen;
6865 PyObject *ret, *item, *name, *sha, *py_strict = NULL;
6966 static char *kwlist[] = {"text", "strict", NULL};
7067
146143 {
147144 const struct tree_item *a = _a, *b = _b;
148145 const char *remain_a, *remain_b;
149 int ret, common;
146 int ret;
147 size_t common;
150148 if (strlen(a->name) > strlen(b->name)) {
151149 common = strlen(b->name);
152150 remain_a = a->name + common;
174172 static PyObject *py_sorted_tree_items(PyObject *self, PyObject *args)
175173 {
176174 struct tree_item *qsort_entries = NULL;
177 int name_order, num_entries, n = 0, i;
175 int name_order, n = 0, i;
178176 PyObject *entries, *py_name_order, *ret, *key, *value, *py_mode, *py_sha;
179 Py_ssize_t pos = 0;
177 Py_ssize_t pos = 0, num_entries;
180178 int (*cmp)(const void *, const void *);
181179
182180 if (!PyArg_ParseTuple(args, "OO", &entries, &py_name_order))
263263 PyObject *m;
264264 PyObject *errors_module;
265265
266 errors_module = PyImport_ImportModule("dulwich.errors");
267 if (errors_module == NULL)
268 return NULL;
269
270 PyExc_ApplyDeltaError = PyObject_GetAttrString(errors_module, "ApplyDeltaError");
271 Py_DECREF(errors_module);
272 if (PyExc_ApplyDeltaError == NULL)
273 return NULL;
274
275266 #if PY_MAJOR_VERSION >= 3
276267 static struct PyModuleDef moduledef = {
277268 PyModuleDef_HEAD_INIT,
284275 NULL, /* m_clear*/
285276 NULL, /* m_free */
286277 };
278 #endif
279
280 errors_module = PyImport_ImportModule("dulwich.errors");
281 if (errors_module == NULL)
282 return NULL;
283
284 PyExc_ApplyDeltaError = PyObject_GetAttrString(errors_module, "ApplyDeltaError");
285 Py_DECREF(errors_module);
286 if (PyExc_ApplyDeltaError == NULL)
287 return NULL;
288
289 #if PY_MAJOR_VERSION >= 3
287290 m = PyModule_Create(&moduledef);
288291 #else
289292 m = Py_InitModule3("_pack", py_pack_methods, NULL);
3333 """Turn a list of bytestrings into a file-like object.
3434
3535 This is similar to creating a `BytesIO` from a concatenation of the
36 bytestring list, but saves memory by NOT creating one giant bytestring first::
36 bytestring list, but saves memory by NOT creating one giant bytestring
37 first::
3738
38 BytesIO(b''.join(list_of_bytestrings)) =~= ChunkedBytesIO(list_of_bytestrings)
39 BytesIO(b''.join(list_of_bytestrings)) =~= ChunkedBytesIO(
40 list_of_bytestrings)
3941 """
4042 def __init__(self, contents):
4143 self.contents = contents
8385 try:
8486 blob = store[entry.sha]
8587 except KeyError:
86 # Entry probably refers to a submodule, which we don't yet support.
88 # Entry probably refers to a submodule, which we don't yet
89 # support.
8790 continue
8891 data = ChunkedBytesIO(blob.chunked)
8992
9093 info = tarfile.TarInfo()
91 info.name = entry_abspath.decode('ascii') # tarfile only works with ascii.
94 # tarfile only works with ascii.
95 info.name = entry_abspath.decode('ascii')
9296 info.size = blob.raw_length()
9397 info.mode = entry.mode
9498 info.mtime = mtime
103103 return len(select.select([fileno], [], [], 0)[0]) > 0
104104
105105
def _win32_peek_avail(handle):
    """Wrapper around PeekNamedPipe to check how many bytes are available.

    :param handle: OS pipe handle (e.g. from msvcrt.get_osfhandle)
    :return: number of bytes available to read from the pipe
    :raise OSError: if the PeekNamedPipe call fails
    """
    # Imported lazily: this helper is only ever used on Windows.
    import ctypes
    from ctypes import byref, wintypes, windll
    c_avail = wintypes.DWORD()
    c_message = wintypes.DWORD()
    success = windll.kernel32.PeekNamedPipe(
        handle, None, 0, None, byref(c_avail),
        byref(c_message))
    if not success:
        # Bug fix: GetLastError is exposed by the ctypes module itself,
        # not by ctypes.wintypes -- wintypes.GetLastError() would raise
        # AttributeError here and mask the real pipe error.
        raise OSError(ctypes.GetLastError())
    return c_avail.value
117
118
106119 COMMON_CAPABILITIES = [CAPABILITY_OFS_DELTA, CAPABILITY_SIDE_BAND_64K]
107120 FETCH_CAPABILITIES = ([CAPABILITY_THIN_PACK, CAPABILITY_MULTI_ACK,
108121 CAPABILITY_MULTI_ACK_DETAILED] +
145158 ref_status[ref] = status
146159 # TODO(jelmer): don't assume encoding of refs is ascii.
147160 raise UpdateRefsError(', '.join([
148 ref.decode('ascii') for ref in ref_status if ref not in ok]) +
161 refname.decode('ascii') for refname in ref_status
162 if refname not in ok]) +
149163 ' failed to update', ref_status=ref_status)
150164
151165 def handle_packet(self, pkt):
230244 """
231245 raise NotImplementedError(cls.from_parsedurl)
232246
233 def send_pack(self, path, determine_wants, generate_pack_contents,
247 def send_pack(self, path, update_refs, generate_pack_contents,
234248 progress=None, write_pack=write_pack_objects):
235249 """Upload a pack to a remote repository.
236250
237251 :param path: Repository path (as bytestring)
252 :param update_refs: Function to determine changes to remote refs.
253 Receive dict with existing remote refs, returns dict with
254 changed refs (name -> sha, where sha=ZERO_SHA for deletions)
238255 :param generate_pack_contents: Function that can return a sequence of
239256 the shas of the objects to upload.
240257 :param progress: Optional progress function
255272 :param path: Path to fetch from (as bytestring)
256273 :param target: Target repository to fetch into
257274 :param determine_wants: Optional function to determine what refs
258 to fetch
275 to fetch. Receives dictionary of name->sha, should return
276 list of shas to fetch. Defaults to all shas.
259277 :param progress: Optional progress function
260278 :return: Dictionary with all remote refs (not just those fetched)
261279 """
265283 # TODO(jelmer): Avoid reading entire file into memory and
266284 # only processing it after the whole file has been fetched.
267285 f = BytesIO()
286
268287 def commit():
269288 if f.tell():
270289 f.seek(0)
271290 target.object_store.add_thin_pack(f.read, None)
291
272292 def abort():
273293 pass
274294 else:
288308 progress=None):
289309 """Retrieve a pack from a git smart server.
290310
291 :param determine_wants: Callback that returns list of commits to fetch
311 :param path: Remote path to fetch from
 312 :param determine_wants: Function to determine what refs
313 to fetch. Receives dictionary of name->sha, should return
314 list of shas to fetch.
292315 :param graph_walker: Object with next() and ack().
293316 :param pack_data: Callback called for each bit of data in the pack
294317 :param progress: Callback for progress reports (strings)
336359 else:
337360 ok.add(ref)
338361 ref_status[ref] = status
339 raise UpdateRefsError(', '.join([ref for ref in ref_status
340 if ref not in ok]) +
341 b' failed to update',
342 ref_status=ref_status)
362 raise UpdateRefsError(', '.join([
363 refname for refname in ref_status if refname not in ok]) +
364 b' failed to update', ref_status=ref_status)
343365
344366 def _read_side_band64k_data(self, proto, channel_callbacks):
345367 """Read per-channel data.
381403 old_sha1 = old_refs.get(refname, ZERO_SHA)
382404 if not isinstance(old_sha1, bytes):
383405 raise TypeError('old sha1 for %s is not a bytestring: %r' %
384 (refname, old_sha1))
406 (refname, old_sha1))
385407 new_sha1 = new_refs.get(refname, ZERO_SHA)
386408 if not isinstance(new_sha1, bytes):
387409 raise TypeError('old sha1 for %s is not a bytestring %r' %
388 (refname, new_sha1))
410 (refname, new_sha1))
389411
390412 if old_sha1 != new_sha1:
391413 if sent_capabilities:
392 proto.write_pkt_line(old_sha1 + b' ' + new_sha1 + b' ' + refname)
414 proto.write_pkt_line(old_sha1 + b' ' + new_sha1 + b' ' +
415 refname)
393416 else:
394417 proto.write_pkt_line(
395418 old_sha1 + b' ' + new_sha1 + b' ' + refname + b'\0' +
409432 """
410433 if b"side-band-64k" in capabilities:
411434 if progress is None:
412 progress = lambda x: None
435 def progress(x):
436 pass
413437 channel_callbacks = {2: progress}
414438 if CAPABILITY_REPORT_STATUS in capabilities:
415439 channel_callbacks[1] = PktLineParser(
434458 whether there is extra graph data to read on proto
435459 """
436460 assert isinstance(wants, list) and isinstance(wants[0], bytes)
437 proto.write_pkt_line(COMMAND_WANT + b' ' + wants[0] + b' ' + b' '.join(capabilities) + b'\n')
461 proto.write_pkt_line(COMMAND_WANT + b' ' + wants[0] + b' ' +
462 b' '.join(capabilities) + b'\n')
438463 for want in wants[1:]:
439464 proto.write_pkt_line(COMMAND_WANT + b' ' + want + b'\n')
440465 proto.write_pkt_line(None)
480505 if CAPABILITY_SIDE_BAND_64K in capabilities:
481506 if progress is None:
482507 # Just ignore progress data
483 progress = lambda x: None
508
509 def progress(x):
510 pass
484511 self._read_side_band64k_data(proto, {
485512 SIDE_BAND_CHANNEL_DATA: pack_data,
486513 SIDE_BAND_CHANNEL_PROGRESS: progress}
516543 """
517544 raise NotImplementedError()
518545
519 def send_pack(self, path, determine_wants, generate_pack_contents,
546 def send_pack(self, path, update_refs, generate_pack_contents,
520547 progress=None, write_pack=write_pack_objects):
521548 """Upload a pack to a remote repository.
522549
523550 :param path: Repository path (as bytestring)
551 :param update_refs: Function to determine changes to remote refs.
552 Receive dict with existing remote refs, returns dict with
553 changed refs (name -> sha, where sha=ZERO_SHA for deletions)
524554 :param generate_pack_contents: Function that can return a sequence of
525555 the shas of the objects to upload.
526556 :param progress: Optional callback called with progress updates
536566 proto, unused_can_read = self._connect(b'receive-pack', path)
537567 with proto:
538568 old_refs, server_capabilities = read_pkt_refs(proto)
539 negotiated_capabilities = self._send_capabilities & server_capabilities
569 negotiated_capabilities = (
570 self._send_capabilities & server_capabilities)
540571
541572 if CAPABILITY_REPORT_STATUS in negotiated_capabilities:
542573 self._report_status_parser = ReportStatusParser()
543574 report_status_parser = self._report_status_parser
544575
545576 try:
546 new_refs = orig_new_refs = determine_wants(dict(old_refs))
577 new_refs = orig_new_refs = update_refs(dict(old_refs))
547578 except:
548579 proto.write_pkt_line(None)
549580 raise
550581
551 if not CAPABILITY_DELETE_REFS in server_capabilities:
582 if CAPABILITY_DELETE_REFS not in server_capabilities:
552583 # Server does not support deletions. Fail later.
553584 new_refs = dict(orig_new_refs)
554585 for ref, sha in orig_new_refs.items():
555586 if sha == ZERO_SHA:
556587 if CAPABILITY_REPORT_STATUS in negotiated_capabilities:
557588 report_status_parser._ref_statuses.append(
558 b'ng ' + sha + b' remote does not support deleting refs')
589 b'ng ' + sha +
590 b' remote does not support deleting refs')
559591 report_status_parser._ref_status_ok = False
560592 del new_refs[ref]
561593
572604
573605 (have, want) = self._handle_receive_pack_head(
574606 proto, negotiated_capabilities, old_refs, new_refs)
575 if not want and set(new_refs.items()).issubset(set(old_refs.items())):
607 if (not want and
608 set(new_refs.items()).issubset(set(old_refs.items()))):
576609 return new_refs
577610 objects = generate_pack_contents(have, want)
578611
591624 progress=None):
592625 """Retrieve a pack from a git smart server.
593626
594 :param determine_wants: Callback that returns list of commits to fetch
627 :param path: Remote path to fetch from
 628 :param determine_wants: Function to determine what refs
629 to fetch. Receives dictionary of name->sha, should return
630 list of shas to fetch.
595631 :param graph_walker: Object with next() and ack().
596632 :param pack_data: Callback called for each bit of data in the pack
597633 :param progress: Callback for progress reports (strings)
620656 self._handle_upload_pack_head(
621657 proto, negotiated_capabilities, graph_walker, wants, can_read)
622658 self._handle_upload_pack_tail(
623 proto, negotiated_capabilities, graph_walker, pack_data, progress)
659 proto, negotiated_capabilities, graph_walker, pack_data,
660 progress)
624661 return refs
625662
626663 def get_refs(self, path):
701738 rfile = s.makefile('rb', -1)
702739 # 0 means unbuffered
703740 wfile = s.makefile('wb', 0)
741
704742 def close():
705743 rfile.close()
706744 wfile.close()
711749 if path.startswith(b"/~"):
712750 path = path[1:]
713751 # TODO(jelmer): Alternative to ascii?
714 proto.send_cmd(b'git-' + cmd, path, b'host=' + self._host.encode('ascii'))
752 proto.send_cmd(
753 b'git-' + cmd, path, b'host=' + self._host.encode('ascii'))
715754 return proto, lambda: _fileno_can_read(s)
716755
717756
729768 def can_read(self):
730769 if sys.platform == 'win32':
731770 from msvcrt import get_osfhandle
732 from win32pipe import PeekNamedPipe
733771 handle = get_osfhandle(self.proc.stdout.fileno())
734 data, total_bytes_avail, msg_bytes_left = PeekNamedPipe(handle, 0)
735 return total_bytes_avail != 0
772 return _win32_peek_avail(handle) != 0
736773 else:
737774 return _fileno_can_read(self.proc.stdout.fileno())
738775
747784 def find_git_command():
748785 """Find command to run for system Git (usually C Git).
749786 """
750 if sys.platform == 'win32': # support .exe, .bat and .cmd
751 try: # to avoid overhead
787 if sys.platform == 'win32': # support .exe, .bat and .cmd
788 try: # to avoid overhead
752789 import win32api
753 except ImportError: # run through cmd.exe with some overhead
790 except ImportError: # run through cmd.exe with some overhead
754791 return ['cmd', '/c', 'git']
755792 else:
756793 status, git = win32api.FindExecutable('git')
779816 def _connect(self, service, path):
780817 if not isinstance(service, bytes):
781818 raise TypeError(service)
782 if not isinstance(path, bytes):
783 path = path.encode(self._remote_path_encoding)
819 if isinstance(path, bytes):
820 path = path.decode(self._remote_path_encoding)
784821 if self.git_command is None:
785822 git_command = find_git_command()
786823 argv = git_command + [service.decode('ascii'), path]
819856 path = path.decode(sys.getfilesystemencoding())
820857 return closing(Repo(path))
821858
822 def send_pack(self, path, determine_wants, generate_pack_contents,
859 def send_pack(self, path, update_refs, generate_pack_contents,
823860 progress=None, write_pack=write_pack_objects):
824861 """Upload a pack to a remote repository.
825862
826863 :param path: Repository path (as bytestring)
864 :param update_refs: Function to determine changes to remote refs.
865 Receive dict with existing remote refs, returns dict with
866 changed refs (name -> sha, where sha=ZERO_SHA for deletions)
827867 :param generate_pack_contents: Function that can return a sequence of
828868 the shas of the objects to upload.
829869 :param progress: Optional progress function
837877 {refname: new_ref}, including deleted refs.
838878 """
839879 if not progress:
840 progress = lambda x: None
841
842 with self._open_repo(path) as target:
880 def progress(x):
881 pass
882
883 with self._open_repo(path) as target:
843884 old_refs = target.get_refs()
844 new_refs = determine_wants(dict(old_refs))
885 new_refs = update_refs(dict(old_refs))
845886
846887 have = [sha1 for sha1 in old_refs.values() if sha1 != ZERO_SHA]
847888 want = []
848889 for refname, new_sha1 in new_refs.items():
849 if new_sha1 not in have and not new_sha1 in want and new_sha1 != ZERO_SHA:
890 if (new_sha1 not in have and
891 new_sha1 not in want and
892 new_sha1 != ZERO_SHA):
850893 want.append(new_sha1)
851894
852 if not want and set(new_refs.items()).issubset(set(old_refs.items())):
895 if (not want and
896 set(new_refs.items()).issubset(set(old_refs.items()))):
853897 return new_refs
854898
855899 target.object_store.add_objects(generate_pack_contents(have, want))
857901 for refname, new_sha1 in new_refs.items():
858902 old_sha1 = old_refs.get(refname, ZERO_SHA)
859903 if new_sha1 != ZERO_SHA:
860 if not target.refs.set_if_equals(refname, old_sha1, new_sha1):
861 progress('unable to set %s to %s' % (refname, new_sha1))
904 if not target.refs.set_if_equals(
905 refname, old_sha1, new_sha1):
906 progress('unable to set %s to %s' %
907 (refname, new_sha1))
862908 else:
863909 if not target.refs.remove_if_equals(refname, old_sha1):
864910 progress('unable to remove %s' % refname)
870916
871917 :param path: Path to fetch from (as bytestring)
872918 :param target: Target repository to fetch into
873 :param determine_wants: Optional function to determine what refs
874 to fetch
 919 :param determine_wants: Optional function to determine what refs
920 to fetch. Receives dictionary of name->sha, should return
921 list of shas to fetch. Defaults to all shas.
875922 :param progress: Optional progress function
876923 :return: Dictionary with all remote refs (not just those fetched)
877924 """
883930 progress=None):
884931 """Retrieve a pack from a git smart server.
885932
886 :param determine_wants: Callback that returns list of commits to fetch
933 :param path: Remote path to fetch from
 934 :param determine_wants: Function to determine what refs
935 to fetch. Receives dictionary of name->sha, should return
936 list of shas to fetch.
887937 :param graph_walker: Object with next() and ack().
888938 :param pack_data: Callback called for each bit of data in the pack
889939 :param progress: Callback for progress reports (strings)
890940 :return: Dictionary with all remote refs (not just those fetched)
891941 """
892942 with self._open_repo(path) as r:
893 objects_iter = r.fetch_objects(determine_wants, graph_walker, progress)
894
895 # Did the process short-circuit (e.g. in a stateless RPC call)? Note
896 # that the client still expects a 0-object pack in most cases.
943 objects_iter = r.fetch_objects(
944 determine_wants, graph_walker, progress)
945
946 # Did the process short-circuit (e.g. in a stateless RPC call)?
947 # Note that the client still expects a 0-object pack in most cases.
897948 if objects_iter is None:
898949 return
899950 write_pack_objects(ProtocolFile(None, pack_data), objects_iter)
939990 """SSH vendor that shells out to the local 'ssh' command."""
940991
941992 def run_command(self, host, command, username=None, port=None):
942 if not isinstance(command, bytes):
943 raise TypeError(command)
944
945 #FIXME: This has no way to deal with passwords..
993 # FIXME: This has no way to deal with passwords..
946994 args = ['ssh', '-x']
947995 if port is not None:
948996 args.extend(['-p', str(port)])
10041052 def _connect(self, cmd, path):
10051053 if not isinstance(cmd, bytes):
10061054 raise TypeError(cmd)
1007 if not isinstance(path, bytes):
1008 path = path.encode(self._remote_path_encoding)
1009 if path.startswith(b"/~"):
1055 if isinstance(path, bytes):
1056 path = path.decode(self._remote_path_encoding)
1057 if path.startswith("/~"):
10101058 path = path[1:]
1011 argv = self._get_cmd_path(cmd) + b" '" + path + b"'"
1059 argv = (self._get_cmd_path(cmd).decode(self._remote_path_encoding) +
1060 " '" + path + "'")
10121061 con = self.ssh_vendor.run_command(
10131062 self.host, argv, port=self.port, username=self.username)
10141063 return (Protocol(con.read, con.write, con.close,
10751124 password=password, username=username, **kwargs)
10761125
10771126 def __repr__(self):
1078 return "%s(%r, dumb=%r)" % (type(self).__name__, self._base_url, self.dumb)
1127 return "%s(%r, dumb=%r)" % (
1128 type(self).__name__, self._base_url, self.dumb)
10791129
10801130 def _get_url(self, path):
1131 if not isinstance(path, str):
1132 # TODO(jelmer): this is unrelated to the local filesystem;
1133 # This is not necessarily the right encoding to decode the path
1134 # with.
1135 path = path.decode(sys.getfilesystemencoding())
10811136 return urlparse.urljoin(self._base_url, path).rstrip("/") + "/"
10821137
10831138 def _http_request(self, url, headers={}, data=None):
11371192 if content_type != (
11381193 "application/x-%s-result" % service):
11391194 raise GitProtocolError("Invalid content-type from server: %s"
1140 % content_type)
1195 % content_type)
11411196 return resp
11421197
1143 def send_pack(self, path, determine_wants, generate_pack_contents,
1198 def send_pack(self, path, update_refs, generate_pack_contents,
11441199 progress=None, write_pack=write_pack_objects):
11451200 """Upload a pack to a remote repository.
11461201
11471202 :param path: Repository path (as bytestring)
1203 :param update_refs: Function to determine changes to remote refs.
1204 Receive dict with existing remote refs, returns dict with
1205 changed refs (name -> sha, where sha=ZERO_SHA for deletions)
11481206 :param generate_pack_contents: Function that can return a sequence of
11491207 the shas of the objects to upload.
11501208 :param progress: Optional progress function
11651223 if CAPABILITY_REPORT_STATUS in negotiated_capabilities:
11661224 self._report_status_parser = ReportStatusParser()
11671225
1168 new_refs = determine_wants(dict(old_refs))
1226 new_refs = update_refs(dict(old_refs))
11691227 if new_refs is None:
 11701228 # Update refs function is aborting the push.
11711229 return old_refs
11841242 data=req_data.getvalue())
11851243 try:
11861244 resp_proto = Protocol(resp.read, None)
1187 self._handle_receive_pack_tail(resp_proto, negotiated_capabilities,
1188 progress)
1245 self._handle_receive_pack_tail(
1246 resp_proto, negotiated_capabilities, progress)
11891247 return new_refs
11901248 finally:
11911249 resp.close()
1192
11931250
11941251 def fetch_pack(self, path, determine_wants, graph_walker, pack_data,
11951252 progress=None):
12041261 url = self._get_url(path)
12051262 refs, server_capabilities = self._discover_references(
12061263 b"git-upload-pack", url)
1207 negotiated_capabilities = self._fetch_capabilities & server_capabilities
1264 negotiated_capabilities = (
1265 self._fetch_capabilities & server_capabilities)
12081266 wants = determine_wants(refs)
12091267 if wants is not None:
12101268 wants = [cid for cid in wants if cid != ZERO_SHA]
12151273 req_data = BytesIO()
12161274 req_proto = Protocol(None, req_data.write)
12171275 self._handle_upload_pack_head(
1218 req_proto, negotiated_capabilities, graph_walker, wants,
1219 lambda: False)
1276 req_proto, negotiated_capabilities, graph_walker, wants,
1277 lambda: False)
12201278 resp = self._smart_request(
12211279 "git-upload-pack", url, data=req_data.getvalue())
12221280 try:
12231281 resp_proto = Protocol(resp.read, None)
1224 self._handle_upload_pack_tail(resp_proto, negotiated_capabilities,
1225 graph_walker, pack_data, progress)
1282 self._handle_upload_pack_tail(
1283 resp_proto, negotiated_capabilities, graph_walker, pack_data,
1284 progress)
12261285 return refs
12271286 finally:
12281287 resp.close()
12821341 # Windows local path
12831342 return default_local_git_client_cls(**kwargs), location
12841343
1285 if ':' in location and not '@' in location:
1344 if ':' in location and '@' not in location:
12861345 # SSH with no user@, zero or one leading slash.
12871346 (hostname, path) = location.split(':', 1)
12881347 return SSHGitClient(hostname, **kwargs), path
171171
def _format_string(value):
    """Format a config value, quoting it when necessary.

    A value that starts or ends with whitespace, or that contains a '#',
    is wrapped in double quotes; in all cases the value is escaped.
    """
    needs_quoting = (
        value.startswith((b" ", b"\t")) or
        value.endswith((b" ", b"\t")) or
        b'#' in value)
    if needs_quoting:
        return b'"' + _escape_value(value) + b'"'
    return _escape_value(value)
180181
181182
182183 _ESCAPE_TABLE = {
188189 }
189190 _COMMENT_CHARS = [ord(b"#"), ord(b";")]
190191 _WHITESPACE_CHARS = [ord(b"\t"), ord(b" ")]
192
191193
192194 def _parse_string(value):
193195 value = bytearray(value.strip())
207209 (value, i))
208210 except KeyError:
209211 raise ValueError(
210 "escape character followed by unknown character %s at %d in %r" %
211 (value[i], i, value))
212 "escape character followed by unknown character "
213 "%s at %d in %r" % (value[i], i, value))
212214 if whitespace:
213215 ret.extend(whitespace)
214216 whitespace = bytearray()
235237
236238 def _escape_value(value):
237239 """Escape a value."""
238 return value.replace(b"\\", b"\\\\").replace(b"\n", b"\\n").replace(b"\t", b"\\t").replace(b"\"", b"\\\"")
240 value = value.replace(b"\\", b"\\\\")
241 value = value.replace(b"\n", b"\\n")
242 value = value.replace(b"\t", b"\\t")
243 value = value.replace(b"\"", b"\\\"")
244 return value
239245
240246
241247 def _check_variable_name(name):
294300 section = (pts[0], pts[1])
295301 else:
296302 if not _check_section_name(pts[0]):
297 raise ValueError("invalid section name %r" %
298 pts[0])
303 raise ValueError(
304 "invalid section name %r" % pts[0])
299305 pts = pts[0].split(b".", 1)
300306 if len(pts) == 2:
301307 section = (pts[0], pts[1])
358364 if subsection_name is None:
359365 f.write(b"[" + section_name + b"]\n")
360366 else:
361 f.write(b"[" + section_name + b" \"" + subsection_name + b"\"]\n")
367 f.write(b"[" + section_name +
368 b" \"" + subsection_name + b"\"]\n")
362369 for key, value in values.items():
363370 if value is True:
364371 value = b"true"
3232 import paramiko
3333 import paramiko.client
3434 import threading
35
3536
3637 class _ParamikoWrapper(object):
3738 STDERR_READ_N = 2048 # 2k
115116
116117 def run_command(self, host, command, username=None, port=None,
117118 progress_stderr=None):
118 if not isinstance(command, bytes):
119 raise TypeError(command)
120119 # Paramiko needs an explicit port. None is not valid
121120 if port is None:
122121 port = 22
285285 connection_timeout=self.http_timeout,
286286 network_timeout=self.http_timeout,
287287 headers=token_header)
288 self.base_path = str(
289 posixpath.join(urlparse.urlparse(self.storage_url).path, self.root))
288 self.base_path = str(posixpath.join(
289 urlparse.urlparse(self.storage_url).path, self.root))
290290
291291 def swift_auth_v1(self):
292292 self.user = self.user.replace(";", ":")
811811 entries.sort()
812812 pack_base_name = posixpath.join(
813813 self.pack_dir,
814 'pack-' + iter_sha1(e[0] for e in entries).decode(sys.getfilesystemencoding()))
814 'pack-' + iter_sha1(e[0] for e in entries).decode(
815 sys.getfilesystemencoding()))
815816 self.scon.put_object(pack_base_name + '.pack', f)
816817
817818 # Write the index.
993994
994995 try:
995996 import gevent
996 import geventhttpclient
997 import geventhttpclient # noqa: F401
997998 except ImportError:
998999 print("gevent and geventhttpclient libraries are mandatory "
9991000 " for use the Swift backend.")
10351036 }
10361037
10371038 if len(sys.argv) < 2:
1038 print("Usage: %s <%s> [OPTIONS...]" % (sys.argv[0], "|".join(commands.keys())))
1039 print("Usage: %s <%s> [OPTIONS...]" % (
1040 sys.argv[0], "|".join(commands.keys())))
10391041 sys.exit(1)
10401042
10411043 cmd = sys.argv[1]
1042 if not cmd in commands:
1044 if cmd not in commands:
10431045 print("No such subcommand: %s" % cmd)
10441046 sys.exit(1)
10451047 commands[cmd](sys.argv[2:])
10461048
1049
10471050 if __name__ == '__main__':
10481051 main()
6464 missing_libs = []
6565
6666 try:
67 import gevent
67 import gevent # noqa:F401
6868 except ImportError:
6969 missing_libs.append("gevent")
7070
7171 try:
72 import geventhttpclient
72 import geventhttpclient # noqa:F401
7373 except ImportError:
7474 missing_libs.append("geventhttpclient")
7575
8080
8181 skipmsg = "Required libraries are not installed (%r)" % missing_libs
8282
83 skipIfPY3 = skipIf(sys.version_info[0] == 3, "SWIFT module not yet ported to python3.")
83 skipIfPY3 = skipIf(sys.version_info[0] == 3,
84 "SWIFT module not yet ported to python3.")
8485
8586 if not missing_libs:
8687 from dulwich.contrib import swift
200201 data.extend([blob, tree, tag, cmt])
201202 return data
202203
204
203205 @skipIf(missing_libs, skipmsg)
204206 class FakeSwiftConnector(object):
205207
251253
252254 def get_object_stat(self, name):
253255 name = posixpath.join(self.root, name)
254 if not name in self.store:
256 if name not in self.store:
255257 return None
256258 return {'content-length': len(self.store[name])}
257259
314316 head = odata[-1].id
315317 peeled_sha = dict([(sha.object[1], sha.id)
316318 for sha in odata if isinstance(sha, Tag)])
317 get_tagged = lambda: peeled_sha
319
320 def get_tagged():
321 return peeled_sha
318322 i = sos.iter_shas(sos.find_missing_objects([],
319323 [head, ],
320324 progress=None,
477481
478482 def setUp(self):
479483 super(TestSwiftInfoRefsContainer, self).setUp()
480 content = \
481 b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n" + \
482 b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev"
484 content = (
485 b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n"
486 b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev")
483487 self.store = {'fakerepo/info/refs': content}
484488 self.conf = swift.load_conf(file=StringIO(config_file %
485489 def_config_file))
561565
562566 def test_create_root(self):
563567 with patch('dulwich.contrib.swift.SwiftConnector.test_root_exists',
564 lambda *args: None):
568 lambda *args: None):
565569 with patch('geventhttpclient.HTTPClient.request',
566 lambda *args: Response()):
570 lambda *args: Response()):
567571 self.assertEqual(self.conn.create_root(), None)
568572
569573 def test_create_root_fails(self):
615619 self.assertEqual(self.conn.get_object('a').read(), b'content')
616620 with patch('geventhttpclient.HTTPClient.request',
617621 lambda *args, **kwargs: Response(content=b'content')):
618 self.assertEqual(self.conn.get_object('a', range='0-6'), b'content')
622 self.assertEqual(
623 self.conn.get_object('a', range='0-6'),
624 b'content')
619625
620626 def test_get_object_fails(self):
621627 with patch('geventhttpclient.HTTPClient.request',
4141 from gevent import monkey
4242 monkey.patch_all()
4343
44 from dulwich import server
45 from dulwich import repo
46 from dulwich import index
47 from dulwich import client
48 from dulwich import objects
49 from dulwich.contrib import swift
44 from dulwich import ( # noqa:E402
45 server,
46 repo,
47 index,
48 client,
49 objects,
50 )
51 from dulwich.contrib import swift # noqa:E402
5052
5153
5254 class DulwichServer():
201203 files = ('testfile', 'testfile2', 'dir/testfile3')
202204 i = 0
203205 for f in files:
204 file(os.path.join(self.temp_d, f), 'w').write("DATA %s" % i)
206 open(os.path.join(self.temp_d, f), 'w').write("DATA %s" % i)
205207 i += 1
206208 local_repo.stage(files)
207209 local_repo.do_commit('Test commit', 'fbo@localhost',
251253 files = ('testfile11', 'testfile22', 'test/testfile33')
252254 i = 0
253255 for f in files:
254 file(os.path.join(self.temp_d, f), 'w').write("DATA %s" % i)
256 open(os.path.join(self.temp_d, f), 'w').write("DATA %s" % i)
255257 i += 1
256258 local_repo.stage(files)
257259 local_repo.do_commit('Test commit', 'fbo@localhost',
172172 source and target tree.
173173 """
174174 if (rename_detector is not None and tree1_id is not None and
175 tree2_id is not None):
175 tree2_id is not None):
176176 for change in rename_detector.changes_with_renames(
177 tree1_id, tree2_id, want_unchanged=want_unchanged):
178 yield change
177 tree1_id, tree2_id, want_unchanged=want_unchanged):
178 yield change
179179 return
180180
181181 entries = walk_trees(store, tree1_id, tree2_id,
254254 path = change.new.path
255255 changes_by_path[path][i] = change
256256
257 old_sha = lambda c: c.old.sha
258 change_type = lambda c: c.type
257 def old_sha(c):
258 return c.old.sha
259
260 def change_type(c):
261 return c.type
259262
260263 # Yield only conflicting changes.
261264 for _, changes in sorted(changes_by_path.items()):
380383 an add/delete pair to be a rename/copy; see _similarity_score.
381384 :param max_files: The maximum number of adds and deletes to consider,
382385 or None for no limit. The detector is guaranteed to compare no more
383 than max_files ** 2 add/delete pairs. This limit is provided because
384 rename detection can be quadratic in the project size. If the limit
385 is exceeded, no content rename detection is attempted.
386 than max_files ** 2 add/delete pairs. This limit is provided
387 because rename detection can be quadratic in the project size. If
388 the limit is exceeded, no content rename detection is attempted.
386389 :param rewrite_threshold: The threshold similarity score below which a
387390 modify should be considered a delete/add, or None to not break
388391 modifies; see _similarity_score.
403406
404407 def _should_split(self, change):
405408 if (self._rewrite_threshold is None or change.type != CHANGE_MODIFY or
406 change.old.sha == change.new.sha):
409 change.old.sha == change.new.sha):
407410 return False
408411 old_obj = self._store[change.old.sha]
409412 new_obj = self._store[change.new.sha]
550553 path = add.new.path
551554 delete = delete_map.get(path)
552555 if (delete is not None and
553 stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
556 stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
554557 modifies[path] = TreeChange(CHANGE_MODIFY, delete.old, add.new)
555558
556559 self._adds = [a for a in self._adds if a.new.path not in modifies]
569572 def _prune_unchanged(self):
570573 if self._want_unchanged:
571574 return
572 self._deletes = [d for d in self._deletes if d.type != CHANGE_UNCHANGED]
575 self._deletes = [
576 d for d in self._deletes if d.type != CHANGE_UNCHANGED]
573577
574578 def changes_with_renames(self, tree1_id, tree2_id, want_unchanged=False):
575579 """Iterate TreeChanges between two tree SHAs, with rename detection."""
3535 self.got = got
3636 self.extra = extra
3737 if self.extra is None:
38 Exception.__init__(self,
39 "Checksum mismatch: Expected %s, got %s" % (expected, got))
38 Exception.__init__(
39 self, "Checksum mismatch: Expected %s, got %s" %
40 (expected, got))
4041 else:
41 Exception.__init__(self,
42 "Checksum mismatch: Expected %s, got %s; %s" %
42 Exception.__init__(
43 self, "Checksum mismatch: Expected %s, got %s; %s" %
4344 (expected, got, extra))
4445
4546
135136 """Hangup exception."""
136137
137138 def __init__(self):
138 Exception.__init__(self,
139 "The remote server unexpectedly closed the connection.")
139 Exception.__init__(
140 self, "The remote server unexpectedly closed the connection.")
140141
141142
142143 class UnexpectedCommandError(GitProtocolError):
2929 Blob,
3030 Commit,
3131 Tag,
32 ZERO_SHA,
3233 )
3334 from fastimport import __version__ as fastimport_version
34 if fastimport_version <= (0, 9, 5) and sys.version_info[0] == 3 and sys.version_info[1] < 5:
35 if (fastimport_version <= (0, 9, 5) and
36 sys.version_info[0] == 3 and sys.version_info[1] < 5):
3537 raise ImportError("Older versions of fastimport don't support python3<3.5")
36 from fastimport import (
38 from fastimport import ( # noqa: E402
3739 commands,
3840 errors as fastimport_errors,
3941 parser,
4042 processor,
4143 )
4244
43 import stat
45 import stat # noqa: E402
4446
4547
4648 def split_email(text):
6163 self.outf.write(getattr(cmd, "__bytes__", cmd.__repr__)() + b"\n")
6264
6365 def _allocate_marker(self):
64 self._marker_idx+=1
66 self._marker_idx += 1
6567 return ("%d" % (self._marker_idx,)).encode('ascii')
6668
6769 def _export_blob(self, blob):
7678
7779 def _iter_files(self, base_tree, new_tree):
7880 for ((old_path, new_path), (old_mode, new_mode),
79 (old_hexsha, new_hexsha)) in \
81 (old_hexsha, new_hexsha)) in \
8082 self.store.tree_changes(base_tree, new_tree):
8183 if new_path is None:
8284 yield commands.FileDeleteCommand(old_path)
103105 merges = []
104106 author, author_email = split_email(commit.author)
105107 committer, committer_email = split_email(commit.committer)
106 cmd = commands.CommitCommand(ref, marker,
108 cmd = commands.CommitCommand(
109 ref, marker,
107110 (author, author_email, commit.author_time, commit.author_timezone),
108111 (committer, committer_email, commit.commit_time,
109112 commit.commit_timezone),
125128 def __init__(self, repo, params=None, verbose=False, outf=None):
126129 processor.ImportProcessor.__init__(self, params, verbose)
127130 self.repo = repo
128 self.last_commit = None
131 self.last_commit = ZERO_SHA
129132 self.markers = {}
130133 self._contents = {}
131134
173176 blob_id = blob.id
174177 else:
175178 assert filecmd.dataref.startswith(b":"), \
176 "non-marker refs not supported yet (%r)" % filecmd.dataref
179 ("non-marker refs not supported yet (%r)" %
180 filecmd.dataref)
177181 blob_id = self.markers[filecmd.dataref[1:]]
178182 self._contents[filecmd.path] = (filecmd.mode, blob_id)
179183 elif filecmd.name == b"filedelete":
189193 self._contents = {}
190194 else:
191195 raise Exception("Command %s not supported" % filecmd.name)
192 commit.tree = commit_tree(self.repo.object_store,
196 commit.tree = commit_tree(
197 self.repo.object_store,
193198 ((path, hexsha, mode) for (path, (mode, hexsha)) in
194199 self._contents.items()))
195 if self.last_commit is not None:
200 if self.last_commit != ZERO_SHA:
196201 commit.parents.append(self.last_commit)
197 commit.parents += cmd.merges
202 for merge in cmd.merges:
203 if merge.startswith(b':'):
204 merge = self.markers[merge[1:]]
205 commit.parents.append(merge)
198206 self.repo.object_store.add_object(commit)
199207 self.repo[cmd.ref] = commit.id
200208 self.last_commit = commit.id
208216 def _reset_base(self, commit_id):
209217 if self.last_commit == commit_id:
210218 return
219 self._contents = {}
211220 self.last_commit = commit_id
212 self._contents = {}
213 tree_id = self.repo[commit_id].tree
214 for (path, mode, hexsha) in (
215 self.repo.object_store.iter_tree_contents(tree_id)):
216 self._contents[path] = (mode, hexsha)
221 if commit_id != ZERO_SHA:
222 tree_id = self.repo[commit_id].tree
223 for (path, mode, hexsha) in (
224 self.repo.object_store.iter_tree_contents(tree_id)):
225 self._contents[path] = (mode, hexsha)
217226
218227 def reset_handler(self, cmd):
219228 """Process a ResetCommand."""
220 self._reset_base(cmd.from_)
221 self.repo.refs[cmd.ref] = cmd.from_
229 if cmd.from_ is None:
230 from_ = ZERO_SHA
231 else:
232 from_ = cmd.from_
233 if from_.startswith(b":"):
234 from_ = self.markers[from_[1:]]
235 self._reset_base(from_)
236 self.repo.refs[cmd.ref] = from_
222237
223238 def tag_handler(self, cmd):
224239 """Process a TagCommand."""
2424 import os
2525 import sys
2626 import tempfile
27
2728
2829 def ensure_dir_exists(dirname):
2930 """Ensure a directory exists, creating if necessary."""
104105 PROXY_METHODS = ('__iter__', 'flush', 'fileno', 'isatty', 'read',
105106 'readline', 'readlines', 'seek', 'tell',
106107 'truncate', 'write', 'writelines')
108
107109 def __init__(self, filename, mode, bufsize):
108110 self._filename = filename
109111 self._lockfilename = '%s.lock' % self._filename
110 fd = os.open(self._lockfilename,
112 fd = os.open(
113 self._lockfilename,
111114 os.O_RDWR | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0))
112115 self._file = os.fdopen(fd, mode, bufsize)
113116 self._closed = False
136139 """Close this file, saving the lockfile over the original.
137140
138141 :note: If this method fails, it will attempt to delete the lockfile.
139 However, it is not guaranteed to do so (e.g. if a filesystem becomes
140 suddenly read-only), which will prevent future writes to this file
141 until the lockfile is removed manually.
142 :raises OSError: if the original file could not be overwritten. The lock
143 file is still closed, so further attempts to write to the same file
144 object will raise ValueError.
142 However, it is not guaranteed to do so (e.g. if a filesystem
143 becomes suddenly read-only), which will prevent future writes to
144 this file until the lockfile is removed manually.
145 :raises OSError: if the original file could not be overwritten. The
146 lock file is still closed, so further attempts to write to the same
147 file object will raise ValueError.
145148 """
146149 if self._closed:
147150 return
151154 os.rename(self._lockfilename, self._filename)
152155 except OSError as e:
153156 if sys.platform == 'win32' and e.errno == errno.EEXIST:
154 # Windows versions prior to Vista don't support atomic renames
157 # Windows versions prior to Vista don't support atomic
158 # renames
155159 _fancy_rename(self._lockfilename, self._filename)
156160 else:
157161 raise
0 # Copyright (C) 2017 Jelmer Vernooij <jelmer@jelmer.uk>
1 #
2 # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
3 # General Public License as public by the Free Software Foundation; version 2.0
4 # or (at your option) any later version. You can redistribute it and/or
5 # modify it under the terms of either of these two licenses.
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12 #
13 # You should have received a copy of the licenses; if not, see
14 # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
15 # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
16 # License, Version 2.0.
17 #
18
19 """Parsing of gitignore files.
20
21 For details for the matching rules, see https://git-scm.com/docs/gitignore
22 """
23
24 import os.path
25 import re
26 import sys
27
28
29 def _translate_segment(segment):
30 if segment == b"*":
31 return b'[^/]+'
32 res = b""
33 i, n = 0, len(segment)
34 while i < n:
35 c = segment[i:i+1]
36 i = i+1
37 if c == b'*':
38 res += b'[^/]*'
39 elif c == b'?':
40 res += b'.'
41 elif c == b'[':
42 j = i
43 if j < n and segment[j:j+1] == b'!':
44 j = j+1
45 if j < n and segment[j:j+1] == b']':
46 j = j+1
47 while j < n and segment[j:j+1] != b']':
48 j = j+1
49 if j >= n:
50 res += b'\\['
51 else:
52 stuff = segment[i:j].replace(b'\\', b'\\\\')
53 i = j+1
54 if stuff.startswith(b'!'):
55 stuff = b'^' + stuff[1:]
56 elif stuff.startswith(b'^'):
57 stuff = b'\\' + stuff
58 res += b'[' + stuff + b']'
59 else:
60 res += re.escape(c)
61 return res
62
63
64 def translate(pat):
65 """Translate a shell PATTERN to a regular expression.
66
67 There is no way to quote meta-characters.
68
69 Originally copied from fnmatch in Python 2.7, but modified for Dulwich
70 to cope with features in Git ignore patterns.
71 """
72
73 res = b'(?ms)'
74
75 if b'/' not in pat[:-1]:
76 # If there's no slash, this is a filename-based match
77 res += b'(.*/)?'
78
79 if pat.startswith(b'**/'):
80 # Leading **/
81 pat = pat[2:]
82 res += b'(.*/)?'
83
84 if pat.startswith(b'/'):
85 pat = pat[1:]
86
87 for i, segment in enumerate(pat.split(b'/')):
88 if segment == b'**':
89 res += b'(/.*)?'
90 continue
91 else:
92 res += ((re.escape(b'/') if i > 0 else b'') +
93 _translate_segment(segment))
94
95 if not pat.endswith(b'/'):
96 res += b'/?'
97
98 return res + b'\Z'
99
100
def read_ignore_patterns(f):
    """Read a git ignore file.

    :param f: File-like object to read from
    :return: Iterator over patterns (bytes), one per meaningful line
    """

    for line in f:
        line = line.rstrip(b"\r\n")

        if not line:
            # Blank lines exist only for readability.
            continue

        if line.startswith(b'#'):
            # Lines starting with '#' are comments.
            continue

        # Unquoted trailing spaces are dropped; a backslash quotes a space.
        while line.endswith(b' ') and not line.endswith(b'\\ '):
            line = line[:-1]

        yield line.replace(b'\\ ', b' ')
125
126
def match_pattern(path, pattern, ignorecase=False):
    """Match a gitignore-style pattern against a path.

    :param path: Path to match
    :param pattern: Pattern to match
    :param ignorecase: Whether to do case-sensitive matching
    :return: bool indicating whether the pattern matched
    """
    matcher = Pattern(pattern, ignorecase)
    return matcher.match(path)
136
137
class Pattern(object):
    """A single ignore pattern."""

    def __init__(self, pattern, ignorecase=False):
        self.pattern = pattern
        self.ignorecase = ignorecase
        if pattern[0:1] == b'!':
            # A leading '!' negates the pattern: matches are re-included.
            self.is_exclude = False
            pattern = pattern[1:]
        else:
            self.is_exclude = True
            if pattern[0:1] == b'\\':
                # A leading backslash escapes a literal '!' or '#'.
                pattern = pattern[1:]
        self._re = re.compile(
            translate(pattern),
            re.IGNORECASE if self.ignorecase else 0)

    def __bytes__(self):
        return self.pattern

    def __str__(self):
        return self.pattern.decode(sys.getfilesystemencoding())

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return (self.pattern == other.pattern and
                self.ignorecase == other.ignorecase)

    def __repr__(self):
        return "%s(%s, %r)" % (
            type(self).__name__, self.pattern, self.ignorecase)

    def match(self, path):
        """Try to match a path against this ignore pattern.

        :param path: Path to match (relative to ignore location)
        :return: boolean
        """
        return bool(self._re.match(path))
178
179
class IgnoreFilter(object):
    """A set of ignore patterns read from a single source."""

    def __init__(self, patterns, ignorecase=False):
        self._patterns = []
        self._ignorecase = ignorecase
        for pattern in patterns:
            self.append_pattern(pattern)

    def append_pattern(self, pattern):
        """Add a pattern to the set."""
        self._patterns.append(Pattern(pattern, self._ignorecase))

    def find_matching(self, path):
        """Yield all matching patterns for path.

        :param path: Path to match
        :return: Iterator over iterators
        """
        if not isinstance(path, bytes):
            path = path.encode(sys.getfilesystemencoding())
        for candidate in self._patterns:
            if not candidate.match(path):
                continue
            yield candidate

    def is_ignored(self, path):
        """Check whether a path is ignored.

        For directories, include a trailing slash.

        :return: status is None if file is not mentioned, True if it is
            included, False if it is explicitly excluded.
        """
        verdict = None
        # Patterns later in the file override earlier ones.
        for pattern in self.find_matching(path):
            verdict = pattern.is_exclude
        return verdict

    @classmethod
    def from_path(cls, path, ignorecase=False):
        """Load an IgnoreFilter from an ignore file on disk."""
        with open(path, 'rb') as f:
            ret = cls(read_ignore_patterns(f), ignorecase)
            ret._path = path
            return ret

    def __repr__(self):
        path = getattr(self, '_path', None)
        if path is None:
            return "<%s>" % (type(self).__name__)
        return "%s.from_path(%r)" % (type(self).__name__, path)
229
230
class IgnoreFilterStack(object):
    """Check for ignore status in multiple filters."""

    def __init__(self, filters):
        self._filters = filters

    def is_ignored(self, path):
        """Check whether a path is explicitly included or excluded in ignores.

        :param path: Path to check
        :return: None if the file is not mentioned, True if it is included,
            False if it is explicitly excluded.
        """
        # The first filter with an opinion (non-None) wins.
        for ignore_filter in self._filters:
            verdict = ignore_filter.is_ignored(path)
            if verdict is not None:
                return verdict
        return None
250
251
def default_user_ignore_filter_path(config):
    """Return default user ignore filter path.

    :param config: A Config object
    :return: Path to a global ignore file
    """
    try:
        return config.get(('core', ), 'excludesFile')
    except KeyError:
        pass

    # Fall back to the XDG base-directory location, as git does.
    config_home = os.environ.get(
        "XDG_CONFIG_HOME", os.path.expanduser("~/.config/"))
    return os.path.join(config_home, 'git', 'ignore')
267
268
class IgnoreFilterManager(object):
    """Ignore file manager.

    Combines a list of global ignore filters with the per-directory
    ``.gitignore`` files found under ``top_path``. Each ``.gitignore``
    is loaded lazily and cached per directory.
    """

    def __init__(self, top_path, global_filters, ignorecase):
        # Maps directory path (relative, '/'-separated) -> IgnoreFilter,
        # or None when that directory has no readable .gitignore.
        self._path_filters = {}
        self._top_path = top_path
        self._global_filters = global_filters
        self._ignorecase = ignorecase

    def __repr__(self):
        return "%s(%s, %r, %r)" % (
            type(self).__name__, self._top_path,
            self._global_filters,
            self._ignorecase)

    def _load_path(self, path):
        """Load (and cache) the .gitignore filter for one directory.

        :param path: Directory path relative to the top path
        :return: An IgnoreFilter, or None if no .gitignore could be read
        """
        try:
            return self._path_filters[path]
        except KeyError:
            pass

        p = os.path.join(self._top_path, path, '.gitignore')
        try:
            self._path_filters[path] = IgnoreFilter.from_path(
                p, self._ignorecase)
        except IOError:
            # Missing or unreadable .gitignore; cache the negative result.
            self._path_filters[path] = None
        return self._path_filters[path]

    def find_matching(self, path):
        """Find matching patterns for path.

        Stops after the first ignore file with matches.

        :param path: Path to check, relative to the top path
        :return: Iterator over Pattern instances
        :raises ValueError: if path is absolute
        """
        if os.path.isabs(path):
            raise ValueError('%s is an absolute path' % path)
        # Each entry is (s, f): filter f was loaded from the directory made
        # of the first s path components, so it is matched against paths
        # relative to that directory (s == 0 for global filters).
        filters = [(0, f) for f in self._global_filters]
        if os.path.sep != '/':
            path = path.replace(os.path.sep, '/')
        parts = path.split('/')
        for i in range(len(parts)+1):
            dirname = '/'.join(parts[:i])
            for s, f in filters:
                relpath = '/'.join(parts[s:i])
                if i < len(parts):
                    # Paths leading up to the final part are all directories,
                    # so need a trailing slash.
                    relpath += '/'
                matches = list(f.find_matching(relpath))
                if matches:
                    return iter(matches)
            ignore_filter = self._load_path(dirname)
            if ignore_filter is not None:
                # Deeper .gitignore files are consulted before shallower
                # ones, so prepend.
                filters.insert(0, (i, ignore_filter))
        return iter([])

    def is_ignored(self, path):
        """Check whether a path is explicitly included or excluded in ignores.

        :param path: Path to check
        :return: None if the file is not mentioned, True if it is included,
            False if it is explicitly excluded.
        """
        matches = list(self.find_matching(path))
        if matches:
            # Within the selected ignore file, the last matching pattern
            # wins (find_matching yields patterns in file order).
            return matches[-1].is_exclude
        return None

    @classmethod
    def from_repo(cls, repo):
        """Create a IgnoreFilterManager from a repository.

        :param repo: Repository object
        :return: A `IgnoreFilterManager` object
        """
        global_filters = []
        for p in [
                os.path.join(repo.controldir(), 'info', 'exclude'),
                default_user_ignore_filter_path(repo.get_config_stack())]:
            try:
                global_filters.append(IgnoreFilter.from_path(p))
            except IOError:
                # Best effort: skip global ignore files that can't be read.
                pass
        config = repo.get_config_stack()
        # NOTE(review): (b'core') is plain bytes, not a 1-tuple (missing
        # comma) -- presumably get_boolean normalizes a bare section name;
        # confirm against dulwich.config.
        ignorecase = config.get_boolean((b'core'), (b'ignorecase'), False)
        return cls(repo.path, global_filters, ignorecase)
123123 (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = entry
124124 write_cache_time(f, ctime)
125125 write_cache_time(f, mtime)
126 flags = len(name) | (flags &~ 0x0fff)
127 f.write(struct.pack(b'>LLLLLL20sH', dev & 0xFFFFFFFF, ino & 0xFFFFFFFF, mode, uid, gid, size, hex_to_sha(sha), flags))
126 flags = len(name) | (flags & ~0x0fff)
127 f.write(struct.pack(
128 b'>LLLLLL20sH', dev & 0xFFFFFFFF, ino & 0xFFFFFFFF,
129 mode, uid, gid, size, hex_to_sha(sha), flags))
128130 f.write(name)
129131 real_size = ((f.tell() - beginoffset + 8) & ~7)
130132 f.write(b'\0' * ((beginoffset + real_size) - f.tell()))
242244 def __getitem__(self, name):
243245 """Retrieve entry by relative path.
244246
245 :return: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags)
247 :return: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha,
248 flags)
246249 """
247250 return self._byname[name]
248251
291294 :param object_store: Object store to use for retrieving tree contents
292295 :param tree: SHA1 of the root tree
293296 :param want_unchanged: Whether unchanged files should be reported
294 :return: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
297 :return: Iterator over tuples with (oldpath, newpath), (oldmode,
298 newmode), (oldsha, newsha)
295299 """
296300 def lookup_entry(path):
297301 entry = self[path]
298302 return entry.sha, entry.mode
299 for (name, mode, sha) in changes_from_tree(self._byname.keys(),
300 lookup_entry, object_store, tree,
303 for (name, mode, sha) in changes_from_tree(
304 self._byname.keys(), lookup_entry, object_store, tree,
301305 want_unchanged=want_unchanged):
302306 yield (name, mode, sha)
303307
362366
363367
364368 def changes_from_tree(names, lookup_entry, object_store, tree,
365 want_unchanged=False):
369 want_unchanged=False):
366370 """Find the differences between the contents of a tree and
367371 a working copy.
368372
434438 # FIXME: This will fail on Windows. What should we do instead?
435439 if oldstat:
436440 os.unlink(target_path)
441 if sys.platform == 'win32' and sys.version_info[0] == 3:
442 # os.readlink on Python3 on Windows requires a unicode string.
443 # TODO(jelmer): Don't assume tree_encoding == fs_encoding
444 tree_encoding = sys.getfilesystemencoding()
445 contents = contents.decode(tree_encoding)
446 target_path = target_path.decode(tree_encoding)
437447 os.symlink(contents, target_path)
438448 else:
439449 if oldstat is not None and oldstat.st_size == len(contents):
488498 :param object_store: Non-empty object store holding tree contents
489499 :param honor_filemode: An optional flag to honor core.filemode setting in
490500 config file, default is core.filemode=True, change executable bit
491 :param validate_path_element: Function to validate path elements to check out;
492 default just refuses .git and .. directories.
501 :param validate_path_element: Function to validate path elements to check
502 out; default just refuses .git and .. directories.
493503
494504 :note:: existing index is wiped and contents are not merged
495505 in a working dir. Suitable only for fresh clones.
515525 # TODO(jelmer): record and return submodule paths
516526 else:
517527 obj = object_store[entry.sha]
518 st = build_file_from_blob(obj, entry.mode, full_path,
519 honor_filemode=honor_filemode)
528 st = build_file_from_blob(
529 obj, entry.mode, full_path, honor_filemode=honor_filemode)
520530 # Add file to index
521531 if not honor_filemode or S_ISGITLINK(entry.mode):
522532 # we can not use tuple slicing to build a new tuple,
544554 with open(fs_path, 'rb') as f:
545555 blob.data = f.read()
546556 else:
547 blob.data = os.readlink(fs_path)
557 if sys.platform == 'win32' and sys.version_info[0] == 3:
558 # os.readlink on Python3 on Windows requires a unicode string.
559 # TODO(jelmer): Don't assume tree_encoding == fs_encoding
560 tree_encoding = sys.getfilesystemencoding()
561 fs_path = fs_path.decode(tree_encoding)
562 blob.data = os.readlink(fs_path).encode(tree_encoding)
563 else:
564 blob.data = os.readlink(fs_path)
548565 return blob
549566
550567
561578
562579 for tree_path, entry in index.iteritems():
563580 full_path = _tree_to_fs_path(root_path, tree_path)
564 # TODO(jelmer): handle S_ISGITLINK(entry.mode) here
565581 try:
566582 blob = blob_from_path_and_stat(full_path, os.lstat(full_path))
567583 except OSError as e:
573589 except IOError as e:
574590 if e.errno != errno.EISDIR:
575591 raise
576 # The file was changed to a directory, so consider it removed.
577 yield tree_path
592 # This is actually a directory
593 if os.path.exists(os.path.join(tree_path, '.git')):
594 # Submodule
595 from dulwich.errors import NotGitRepository
596 from dulwich.repo import Repo
597 try:
598 if entry.sha != Repo(tree_path).head():
599 yield tree_path
600 except NotGitRepository:
601 yield tree_path
602 else:
603 # The file was changed to a directory, so consider it removed.
604 yield tree_path
578605 else:
579606 if blob.id != entry.sha:
580607 yield tree_path
3030
3131 For many modules, the only function from the logging module they need is
3232 getLogger; this module exports that function for convenience. If a calling
33 module needs something else, it can import the standard logging module directly.
33 module needs something else, it can import the standard logging module
34 directly.
3435 """
3536
3637 import logging
2121
2222 """Git object store interfaces and implementation."""
2323
24
2524 from io import BytesIO
2625 import errno
2726 from itertools import chain
2928 import stat
3029 import sys
3130 import tempfile
31 import time
3232
3333 from dulwich.diff_tree import (
3434 tree_changes,
7373
7474 def determine_wants_all(self, refs):
7575 return [sha for (ref, sha) in refs.items()
76 if not sha in self and not ref.endswith(b"^{}") and
77 not sha == ZERO_SHA]
76 if sha not in self and not ref.endswith(b"^{}") and
77 not sha == ZERO_SHA]
7878
7979 def iter_shas(self, shas):
8080 """Iterate over the objects for the specified shas.
172172 :param wants: Iterable over SHAs of objects to fetch.
173173 :param progress: Simple progress function that will be called with
174174 updated progress strings.
175 :param get_tagged: Function that returns a dict of pointed-to sha -> tag
176 sha for including tags.
177 :param get_parents: Optional function for getting the parents of a commit.
175 :param get_tagged: Function that returns a dict of pointed-to sha ->
176 tag sha for including tags.
177 :param get_parents: Optional function for getting the parents of a
178 commit.
178179 :return: Iterator over (sha, path) pairs.
179180 """
180 finder = MissingObjectFinder(self, haves, wants, progress, get_tagged, get_parents=get_parents)
181 finder = MissingObjectFinder(self, haves, wants, progress, get_tagged,
182 get_parents=get_parents)
181183 return iter(finder.next, None)
182184
183185 def find_common_revisions(self, graphwalker):
209211
210212 :param sha: The object SHA to peel.
211213 :return: The fully-peeled SHA1 of a tag object, after peeling all
212 intermediate tags; if the original ref does not point to a tag, this
213 will equal the original SHA1.
214 intermediate tags; if the original ref does not point to a tag,
215 this will equal the original SHA1.
214216 """
215217 obj = self[sha]
216218 obj_class = object_class(obj.type_name)
226228 :param heads: commits to start from
227229 :param common: commits to end at, or empty set to walk repository
228230 completely
229 :param get_parents: Optional function for getting the parents of a commit.
231 :param get_parents: Optional function for getting the parents of a
232 commit.
230233 :return: a tuple (A, B) where A - all commits reachable
231234 from heads but not present in common, B - common (shared) elements
232235 that are directly reachable from heads
337340
338341 def __iter__(self):
339342 """Iterate over the SHAs that are present in this store."""
340 iterables = list(self.packs) + [self._iter_loose_objects()] + [self._iter_alternate_objects()]
343 iterables = (list(self.packs) + [self._iter_loose_objects()] +
344 [self._iter_alternate_objects()])
341345 return chain(*iterables)
342346
343347 def contains_loose(self, sha):
427431
428432 def _read_alternate_paths(self):
429433 try:
430 f = GitFile(os.path.join(self.path, INFODIR, "alternates"),
431 'rb')
434 f = GitFile(os.path.join(self.path, INFODIR, "alternates"), 'rb')
432435 except (OSError, IOError) as e:
433436 if e.errno == errno.ENOENT:
434437 return
441444 if os.path.isabs(l):
442445 yield l.decode(sys.getfilesystemencoding())
443446 else:
444 yield os.path.join(self.path, l).decode(sys.getfilesystemencoding())
447 yield os.path.join(self.path, l).decode(
448 sys.getfilesystemencoding())
445449
446450 def add_alternate_path(self, path):
447451 """Add an alternate path to this object store.
476480 self.close()
477481 return
478482 raise
479 self._pack_cache_time = os.stat(self.pack_dir).st_mtime
483 self._pack_cache_time = max(
484 os.stat(self.pack_dir).st_mtime, time.time())
480485 pack_files = set()
481486 for name in pack_dir_contents:
482 assert isinstance(name, basestring if sys.version_info[0] == 2 else str)
483487 if name.startswith("pack-") and name.endswith(".pack"):
484 # verify that idx exists first (otherwise the pack was not yet fully written)
488 # verify that idx exists first (otherwise the pack was not yet
489 # fully written)
485490 idx_name = os.path.splitext(name)[0] + ".idx"
486491 if idx_name in pack_dir_contents:
487492 pack_name = name[:-len(".pack")]
497502
498503 def _pack_cache_stale(self):
499504 try:
500 return os.stat(self.pack_dir).st_mtime > self._pack_cache_time
505 return os.stat(self.pack_dir).st_mtime >= self._pack_cache_time
501506 except OSError as e:
502507 if e.errno == errno.ENOENT:
503508 return True
598603 def add_thin_pack(self, read_all, read_some):
599604 """Add a new thin pack to this object store.
600605
601 Thin packs are packs that contain deltas with parents that exist outside
602 the pack. They should never be placed in the object store directly, and
603 always indexed and completed as they are copied.
604
605 :param read_all: Read function that blocks until the number of requested
606 bytes are read.
606 Thin packs are packs that contain deltas with parents that exist
607 outside the pack. They should never be placed in the object store
608 directly, and always indexed and completed as they are copied.
609
610 :param read_all: Read function that blocks until the number of
611 requested bytes are read.
607612 :param read_some: Read function that returns at least one byte, but may
608613 not return the number of bytes requested.
609614 :return: A Pack object pointing at the now-completed thin pack in the
644649 """
645650 fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
646651 f = os.fdopen(fd, 'wb')
652
647653 def commit():
648654 os.fsync(fd)
649655 f.close()
652658 else:
653659 os.remove(path)
654660 return None
661
655662 def abort():
656663 f.close()
657664 os.remove(path)
670677 if e.errno != errno.EEXIST:
671678 raise
672679 if os.path.exists(path):
673 return # Already there, no need to write again
680 return # Already there, no need to write again
674681 with GitFile(path, 'wb') as f:
675682 f.write(obj.as_legacy_object())
676683
758765 call when the pack is finished.
759766 """
760767 f = BytesIO()
768
761769 def commit():
762770 p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
763771 f.close()
764772 for obj in PackInflater.for_pack_data(p, self.get_raw):
765773 self.add_object(obj)
774
766775 def abort():
767776 pass
768777 return f, commit, abort
793802 def add_thin_pack(self, read_all, read_some):
794803 """Add a new thin pack to this object store.
795804
796 Thin packs are packs that contain deltas with parents that exist outside
797 the pack. Because this object store doesn't support packs, we extract
798 and add the individual objects.
799
800 :param read_all: Read function that blocks until the number of requested
801 bytes are read.
805 Thin packs are packs that contain deltas with parents that exist
806 outside the pack. Because this object store doesn't support packs, we
807 extract and add the individual objects.
808
809 :param read_all: Read function that blocks until the number of
810 requested bytes are read.
802811 :param read_some: Read function that returns at least one byte, but may
803812 not return the number of bytes requested.
804813 """
805814 f, commit, abort = self.add_pack()
806815 try:
807816 indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
808 copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer)
817 copier = PackStreamCopier(read_all, read_some, f,
818 delta_iter=indexer)
809819 copier.verify()
810820 self._complete_thin_pack(f, indexer)
811821 except:
10531063 if sha in self._tagged:
10541064 self.add_todo([(self._tagged[sha], None, True)])
10551065 self.sha_done.add(sha)
1056 self.progress(("counting objects: %d\r" % len(self.sha_done)).encode('ascii'))
1066 self.progress(("counting objects: %d\r" %
1067 len(self.sha_done)).encode('ascii'))
10571068 return (sha, name)
10581069
10591070 __next__ = next
11081119 ret = self.heads.pop()
11091120 ps = self.get_parents(ret)
11101121 self.parents[ret] = ps
1111 self.heads.update([p for p in ps if not p in self.parents])
1122 self.heads.update(
1123 [p for p in ps if p not in self.parents])
11121124 return ret
11131125 return None
11141126
135135
136136 def object_header(num_type, length):
137137 """Return an object header for the given numeric type and text length."""
138 return object_class(num_type).type_name + b' ' + str(length).encode('ascii') + b'\0'
138 return (object_class(num_type).type_name +
139 b' ' + str(length).encode('ascii') + b'\0')
139140
140141
141142 def serializable_property(name, docstring=None):
144145 def set(obj, value):
145146 setattr(obj, "_"+name, value)
146147 obj._needs_serialization = True
148
147149 def get(obj):
148150 return getattr(obj, "_"+name)
149151 return property(get, set, doc=docstring)
181183 email_start = identity.find(b'<')
182184 email_end = identity.find(b'>')
183185 if (email_start < 0 or email_end < 0 or email_end <= email_start
184 or identity.find(b'<', email_start + 1) >= 0
185 or identity.find(b'>', email_end + 1) >= 0
186 or not identity.endswith(b'>')):
186 or identity.find(b'<', email_start + 1) >= 0
187 or identity.find(b'>', email_end + 1) >= 0
188 or not identity.endswith(b'>')):
187189 raise ObjectFormatException(error_msg)
188190
189191
513515 def __cmp__(self, other):
514516 if not isinstance(other, ShaFile):
515517 raise TypeError
516 return cmp(self.id, other.id)
518 return cmp(self.id, other.id) # noqa: F821
517519
518520
519521 class Blob(ShaFile):
550552 def _deserialize(self, chunks):
551553 self._chunked_text = chunks
552554
553 chunked = property(_get_chunked, _set_chunked,
555 chunked = property(
556 _get_chunked, _set_chunked,
554557 "The text within the blob object, as chunks (not necessarily lines).")
555558
556559 @classmethod
712715 chunks.append(git_line(_TAGGER_HEADER, self._tagger))
713716 else:
714717 chunks.append(git_line(
715 _TAGGER_HEADER, self._tagger, str(self._tag_time).encode('ascii'),
716 format_timezone(self._tag_timezone, self._tag_timezone_neg_utc)))
718 _TAGGER_HEADER, self._tagger,
719 str(self._tag_time).encode('ascii'),
720 format_timezone(
721 self._tag_timezone, self._tag_timezone_neg_utc)))
717722 if self._message is not None:
718 chunks.append(b'\n') # To close headers
723 chunks.append(b'\n') # To close headers
719724 chunks.append(self._message)
720725 return chunks
721726
746751 else:
747752 self._tagger = value[0:sep+1]
748753 try:
749 (timetext, timezonetext) = value[sep+2:].rsplit(b' ', 1)
754 (timetext, timezonetext) = (
755 value[sep+2:].rsplit(b' ', 1))
750756 self._tag_time = int(timetext)
751 self._tag_timezone, self._tag_timezone_neg_utc = \
752 parse_timezone(timezonetext)
757 self._tag_timezone, self._tag_timezone_neg_utc = (
758 parse_timezone(timezonetext))
753759 except ValueError as e:
754760 raise ObjectFormatException(e)
755761 elif field is None:
771777 object = property(_get_object, _set_object)
772778
773779 name = serializable_property("name", "The name of this tag")
774 tagger = serializable_property("tagger",
775 "Returns the name of the person who created this tag")
776 tag_time = serializable_property("tag_time",
777 "The creation timestamp of the tag. As the number of seconds "
778 "since the epoch")
779 tag_timezone = serializable_property("tag_timezone",
780 "The timezone that tag_time is in.")
780 tagger = serializable_property(
781 "tagger",
782 "Returns the name of the person who created this tag")
783 tag_time = serializable_property(
784 "tag_time",
785 "The creation timestamp of the tag. As the number of seconds "
786 "since the epoch")
787 tag_timezone = serializable_property(
788 "tag_timezone",
789 "The timezone that tag_time is in.")
781790 message = serializable_property(
782 "message", "The message attached to this tag")
791 "message", "The message attached to this tag")
783792
784793
785794 class TreeEntry(namedtuple('TreeEntry', ['path', 'mode', 'sha'])):
827836 :return: Serialized tree text as chunks
828837 """
829838 for name, mode, hexsha in items:
830 yield ("%04o" % mode).encode('ascii') + b' ' + name + b'\0' + hex_to_sha(hexsha)
839 yield (("%04o" % mode).encode('ascii') + b' ' + name +
840 b'\0' + hex_to_sha(hexsha))
831841
832842
833843 def sorted_tree_items(entries, name_order):
968978 except ValueError as e:
969979 raise ObjectFormatException(e)
970980 # TODO: list comprehension is for efficiency in the common (small)
971 # case; if memory efficiency in the large case is a concern, use a genexp.
981 # case; if memory efficiency in the large case is a concern, use a
982 # genexp.
972983 self._entries = dict([(n, (m, s)) for n, m, s in parsed_entries])
973984
974985 def check(self):
10681079 offset = -offset
10691080 else:
10701081 sign = '+'
1071 return ('%c%02d%02d' % (sign, offset / 3600, (offset / 60) % 60)).encode('ascii')
1082 return ('%c%02d%02d' %
1083 (sign, offset / 3600, (offset / 60) % 60)).encode('ascii')
10721084
10731085
10741086 def parse_commit(chunks):
11011113 elif field == _COMMITTER_HEADER:
11021114 committer, timetext, timezonetext = value.rsplit(b' ', 2)
11031115 commit_time = int(timetext)
1104 commit_info = (committer, commit_time, parse_timezone(timezonetext))
1116 commit_info = (
1117 committer, commit_time, parse_timezone(timezonetext))
11051118 elif field == _ENCODING_HEADER:
11061119 encoding = value
11071120 elif field == _MERGETAG_HEADER:
11471160
11481161 def _deserialize(self, chunks):
11491162 (self._tree, self._parents, author_info, commit_info, self._encoding,
1150 self._mergetag, self._gpgsig, self._message, self._extra) = (
1163 self._mergetag, self._gpgsig, self._message, self._extra) = (
11511164 parse_commit(chunks))
1152 (self._author, self._author_time, (self._author_timezone,
1153 self._author_timezone_neg_utc)) = author_info
1154 (self._committer, self._commit_time, (self._commit_timezone,
1155 self._commit_timezone_neg_utc)) = commit_info
1165 (self._author, self._author_time,
1166 (self._author_timezone, self._author_timezone_neg_utc)) = author_info
1167 (self._committer, self._commit_time,
1168 (self._commit_timezone, self._commit_timezone_neg_utc)) = commit_info
11561169
11571170 def check(self):
11581171 """Check this object for internal consistency.
11921205
11931206 def _serialize(self):
11941207 chunks = []
1195 tree_bytes = self._tree.id if isinstance(self._tree, Tree) else self._tree
1208 tree_bytes = (
1209 self._tree.id if isinstance(self._tree, Tree) else self._tree)
11961210 chunks.append(git_line(_TREE_HEADER, tree_bytes))
11971211 for p in self._parents:
11981212 chunks.append(git_line(_PARENT_HEADER, p))
11991213 chunks.append(git_line(
1200 _AUTHOR_HEADER, self._author, str(self._author_time).encode('ascii'),
1201 format_timezone(self._author_timezone,
1202 self._author_timezone_neg_utc)))
1214 _AUTHOR_HEADER, self._author,
1215 str(self._author_time).encode('ascii'),
1216 format_timezone(
1217 self._author_timezone, self._author_timezone_neg_utc)))
12031218 chunks.append(git_line(
1204 _COMMITTER_HEADER, self._committer, str(self._commit_time).encode('ascii'),
1219 _COMMITTER_HEADER, self._committer,
1220 str(self._commit_time).encode('ascii'),
12051221 format_timezone(self._commit_timezone,
12061222 self._commit_timezone_neg_utc)))
12071223 if self.encoding:
12501266 """Return extra settings of this commit."""
12511267 return self._extra
12521268
1253 extra = property(_get_extra,
1269 extra = property(
1270 _get_extra,
12541271 doc="Extra header fields not understood (presumably added in a "
12551272 "newer version of git). Kept verbatim so the object can "
12561273 "be correctly reserialized. For private commit metadata, use "
12571274 "pseudo-headers in Commit.message, rather than this field.")
12581275
1259 author = serializable_property("author",
1276 author = serializable_property(
1277 "author",
12601278 "The name of the author of the commit")
12611279
1262 committer = serializable_property("committer",
1280 committer = serializable_property(
1281 "committer",
12631282 "The name of the committer of the commit")
12641283
12651284 message = serializable_property(
12661285 "message", "The commit message")
12671286
1268 commit_time = serializable_property("commit_time",
1269 "The timestamp of the commit. As the number of seconds since the epoch.")
1270
1271 commit_timezone = serializable_property("commit_timezone",
1287 commit_time = serializable_property(
1288 "commit_time",
1289 "The timestamp of the commit. As the number of seconds since the "
1290 "epoch.")
1291
1292 commit_timezone = serializable_property(
1293 "commit_timezone",
12721294 "The zone the commit time is in")
12731295
1274 author_time = serializable_property("author_time",
1296 author_time = serializable_property(
1297 "author_time",
12751298 "The timestamp the commit was written. As the number of "
12761299 "seconds since the epoch.")
12771300
3636 """
3737 objectish = to_bytes(objectish)
3838 return repo[objectish]
39
40
41 def parse_tree(repo, treeish):
42 """Parse a string referring to a tree.
43
44 :param repo: A `Repo` object
45 :param treeish: A string referring to a tree
46 :return: A git object
47 :raise KeyError: If the object can not be found
48 """
49 treeish = to_bytes(treeish)
50 o = repo[treeish]
51 if o.type_name == b"commit":
52 return repo[o.tree]
53 return o
3954
4055
4156 def parse_ref(container, refspec):
92107 rh = parse_ref(rh_container, rh)
93108 except KeyError:
94109 # TODO: check force?
95 if not b"/" in rh:
110 if b"/" not in rh:
96111 rh = b"refs/heads/" + rh
97112 return (lh, rh, force)
98113
156171 :raise ValueError: If the range can not be parsed
157172 """
158173 committish = to_bytes(committish)
159 return repo[committish] # For now..
174 return repo[committish] # For now..
160175
161176
162177 # TODO: parse_path_in_tree(), which handles e.g. v1.0:Documentation
5252 import os
5353 import sys
5454
55 try:
56 import mmap
57 except ImportError:
58 has_mmap = False
59 else:
60 has_mmap = True
61
62 # For some reason the above try, except fails to set has_mmap = False for plan9
63