Compare commits

..

2 Commits

Author SHA1 Message Date
mediocregopher
1147264ff1 Comment by mediocregopher: Hi all, welcome to the welcome thread! :) The READ...
---
type: comment
comment: Hi all, welcome to the welcome thread! :) The README is a good place to start
  if you're new. Looking forward to hearing from everyone!
fingerprint: AGKKa5dQRgvJaSusKYdPT58OfebCcb8u2ab8tT7fWzU9
credentials:
- type: pgp_signature
  pub_key_id: 95C46FA6A41148AC
  body: iQIzBAABAgAdFiEEJ6tQKp6olvZKJ0lwlcRvpqQRSKwFAl6rR4oACgkQlcRvpqQRSKzcaRAArMNQ3v3zooQnZn3X44G0wrDngrxpYBtVOpfQIszPtWE5F/BlxCzzGa+5GyDg6w4TuoUPcv0boa7Klu4XWMZWO43YXgal+6A9i9I8jtsIBBdatdRKd10JlwUjEyb0gmQT+4Ili1FBTN7y3KiIXLzLPALZ3NELGURp3Lo7aYsVQUxNsHXDXbawYQQRz1yCN9CRMD1UEHvpEZPt7ohUSo2sYefWLsR6WVzTx4fJq6Nc4A/kStQ7iUvq3uI7rDA/w3foo2oFIxkYta9BpEe7ly+Dwmx0CNHjPi3EW2zeei9lQxA0fihiin/0XnN5Kncu+sNgzW7TC5BJEXoVWQcgrSLX9S2KWl92iaNWAUtPDzUZWyY+d1Qq0VDdd4ZZ5Lhmbi8oNbj4JRiK9OLG2EULmVOItH3OY5Z3Pqv3MEkXTavCYsLuLaE/umBzewWMcgx+yk0T9+0frdPbZ5X5wu2S1eToyf0cThRaUB1OG2jrHbo/62dFgmHouSnaX0r2pg0MhIfGA/knk51VLT+RWfBYNwnDhigqVvM8zUAUlPBzWNtdCOLIgTxcHufzqfJeTRt0n3vl2F7FO1LAfT00bun+8hMzSb2nslskfBKdACUfU6GGlSqZMqi5gixnuV+q9POWrNHpNiT3qrjAMw9qRiln4ohTz5X/VtmGsMaLF5zOwfSJgC0=
  account: mediocregopher
2020-05-02 13:47:44 -06:00
mediocregopher
f5584f1505 Create the welcome thread, and a README for it
---
type: change
description: Create the welcome thread, and a README for it
fingerprint: ACfbSiTJmQ04DduNlyf0kNvJgqhGkJC1osSEZ9kdO6+o
credentials:
- type: pgp_signature
  pub_key_id: 95C46FA6A41148AC
  body: iQIzBAABAgAdFiEEJ6tQKp6olvZKJ0lwlcRvpqQRSKwFAl6tzlUACgkQlcRvpqQRSKyTYxAAjfPI881Xu168EJmwi2by9QLcUlcYY+t5DmJaxtGB+WT7W9qcfZ3WOwzST+X4rBvoA8oTPnfI6PE2tuF9RPgRBSxn3JOALRH2VwqoY5fsuTOk5/BO1uukPZdycdDpYZRKpQZKC8kzt3KwYskRR4CoxVroqmAzxEVba4dZTAXprov724cU7QXWXjOtU2iX0JNn/S/yX3L3g1v4sOVbaaUmif4aOLntx+7E2R7v28aBg0HL2uTgSs5nsHLXXfdRcm1CFmGzX8FNAChHkpUg9OdDpd5+mqBf7ymKBWuv0z+I2qe6xTPAshcMm3EWfbUpb1+Bux7UpywwZnz97HvdopFnKaHAfbv99Sfm/OqzgMeLClWv3Iysm1k5PcXobvfs2E9MUfIjG085jTZ0cq0OPqGhODkBOVHyn4Cm71ZMELt9yAkihxKLHjkp3J0WwQv0HbEieA0fE6Czmc481oTd0kGlDWTla/LMd3/vU4Gpx89Y9+2lTV0WaXoAawjJmEXQwqSCiPHYSnfAWgjDTkEAkNMHN8HgMYsVxhtipayvYPiWJFRhL5LVKGNgwUefTfhhhvrx1FBza5sF06XB7vKbb3npvZrfm2faLi1eyFX2xIl7m7dY6C4XYr3CBgEPiBh/NiCaZiOtjxOkzrJ/bsWNGolURMhNt9NAWFms8Nz5bXnRwZQ=
  account: mediocregopher
2020-05-02 13:47:34 -06:00
63 changed files with 69 additions and 8200 deletions

View File

@ -4,3 +4,17 @@ accounts:
signifiers: signifiers:
- type: pgp_public_key_file - type: pgp_public_key_file
path: ".dehub/mediocregopher.asc" path: ".dehub/mediocregopher.asc"
access_controls:
- action: allow
filters:
- type: branch
pattern: public/welcome
- type: payload_type
payload_type: comment
- type: not
filter:
type: commit_attributes
non_fast_forward: true
- type: signature
any: true

2
.gitignore vendored
View File

@ -1,3 +1 @@
/dehub /dehub
/git-http-server
/cmd/git-http-server/git-http-server

View File

@ -1,43 +0,0 @@
# Build stage: compile dehub and git-http-server as static (CGO-disabled)
# linux binaries so they can run in the slim runtime image below.
FROM golang:1.14
WORKDIR /go/src/dehub
COPY . .
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o /usr/bin/dehub ./cmd/dehub
WORKDIR /go/src/dehub/cmd/git-http-server
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o /usr/bin/git-http-server .

# Runtime stage: debian base serving repos over HTTP via nginx + fcgiwrap.
FROM debian:jessie
# Setup Container
VOLUME ["/repos"]
EXPOSE 80
# Setup APT
RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections
# Update, Install Prerequisites, Clean Up APT
RUN DEBIAN_FRONTEND=noninteractive apt-get -y update && \
apt-get -y install git wget nginx-full fcgiwrap && \
apt-get clean
# Setup Container User
RUN useradd -M -s /bin/false git --uid 1000
# Setup nginx fcgi services to run as user git, group git
RUN sed -i 's/FCGI_USER="www-data"/FCGI_USER="git"/g' /etc/init.d/fcgiwrap && \
sed -i 's/FCGI_GROUP="www-data"/FCGI_GROUP="git"/g' /etc/init.d/fcgiwrap && \
sed -i 's/FCGI_SOCKET_OWNER="www-data"/FCGI_SOCKET_OWNER="git"/g' /etc/init.d/fcgiwrap && \
sed -i 's/FCGI_SOCKET_GROUP="www-data"/FCGI_SOCKET_GROUP="git"/g' /etc/init.d/fcgiwrap
# Copy binaries from the build stage
COPY --from=0 /usr/bin/dehub /usr/bin/dehub
COPY --from=0 /usr/bin/git-http-server /usr/bin/git-http-server
# Create config files for container startup and nginx
COPY cmd/dehub-remote/nginx.conf /etc/nginx/nginx.conf
# Create start.sh
COPY cmd/dehub-remote/start.sh /start.sh
RUN chmod +x /start.sh
ENTRYPOINT ["/start.sh"]

142
README.md
View File

@ -1,104 +1,72 @@
# dehub # Welcome!
dehub aims to provide all the features of a git hosting platform, but without Hello! Welcome to the dehub project. You've found your way onto the welcome
the hosting part. These features include: branch. This branch is open for anyone to leave a comment commit on it, provided
they sign their commit with a PGP key.
**User management** - Authentication that commits come from the user they say ## Viewing comments
they do, and fine-grained control over which users can do what.
**Pull requests and issues** - Facilitation of discussion via comment commits, If you've gotten this far then viewing comments is as easy as doing `git log`.
and fine-grained (down to the file level) sign-off requirements. All commits will be shown from newest to oldest. You will only see the latest
snapshot of comments that you've pulled from the server. In order to update that
**Tags and releases** - Mark releases in the repo itself, and provide snapshot do:
immutable and verifiable git tags so there's never any funny business. (Not yet
implemented)
**Plugins**: Extend all aspects of dehub functionality via executables managed
in the repo itself (in the same style as git hooks). (Not yet implemented)
## Key Concepts
To implement these features, dehub combines two key concepts:
First, repo configuration is defined in the repo itself. A file called
`.dehub/config.yml` contains all information related to user accounts, their pgp
keys, branch and file level access controls, and more. Every commit must adhere
to the configuration of its parent in order to be considered _verifiable_. The
configuration file is committed to the repo like any other file would be, and so
is even able to define the access controls on itself.
Second, the commit message of every dehub commit contains a YAML encoded
payload, which allows dehub to extend git and provide multiple commit types,
each with its own capabilities and restrictions. Some example dehub commit types
are `change` commits, `comment` commits, and `credential` commits.
## Infrastructure (or lack thereof)
Because a dehub project is entirely housed within a traditional git project,
which is merely a collection of files, any existing git or network filesystem
infrastructure can be used to host any dehub project:
* The most barebones [git
daemon](https://git-scm.com/book/en/v2/Git-on-the-Server-Git-Daemon) server
(with a simple pre-receive hook set up).
* A remote SSH endpoint.
* A mailing list (aka the old school way).
* Network file syncing utilities such as dropbox,
[syncthing](https://github.com/syncthing/syncthing), or
[NFS](https://en.wikipedia.org/wiki/Network_File_System).
* Existing git project hosts like GitHub, Bitbucket, or Keybase.
* Decentralized filesystems such as IPFS. (Not yet implemented)
## Getting Started {#getting-started}
The dehub project itself can be found by cloning
`https://dehub.dev/src/dehub.git`.
Installation of the dehub tool is currently done via the `go get` command:
``` ```
go get -u -v dehub.dev/src/dehub.git/cmd/dehub git pull -f origin public/welcome
``` ```
This will install the binary to your `$GOBIN` path, which you'll want to put in ## Leaving a comment
your `$PATH`. Run `go env` if you're not sure where your `$GOBIN` is.
Once installed, running `dehub -h` should show you the help output of the The first step to leaving a comment of your own is to install dehub. Visit
command. You can continue on to the tutorials if you're not sure where to go `https://dehub.dev` for more on how to do that.
from here.
### Tutorials {#tutorials} Once done, and assuming you have this branch checked out (how are you reading
this if you don't?), just do the following:
The following tutorials will guide you through the basic usage of dehub. Note ```
that dehub is in the infancy of its development, and so a certain level of dehub commit --anon-pgp-key=KEY_NAME comment
proficiency with git and PGP is required in order to follow these tutorials. ```
* [Tutorial 0: Say Hello!](/docs/tut0.html) (`KEY_NAME` should be replaced with any selector which will match your pgp key,
* [Tutorial 1: Create Your Own Project](/docs/tut1.html) such as the key ID, the name on the key, or the email.)
* [Tutorial 2: Access Controls](/docs/tut2.html)
* [Tutorial 3: Commit Sign-Off](/docs/tut3.html)
### Documentation Your default text editor (defined by the `EDITOR` environment variable) will pop
up and you can then write down your comment. When you save and close your editor
dehub will sign the comment with your pgp key and create a commit with it.
The [SPEC](/docs/SPEC.html) is the best place to see every possible nitty-gritty You can view your newly created commit by calling `git show`.
detail of how dehub works. It attempts to be both human-readable and exhaustive
in its coverage.
### Other links If after you've created your comment commit (but before you've pushed it) you'd
like to amend it, do:
[ROADMAP](/docs/ROADMAP.html) documents upcoming features and other work ```
required on the project. If you're looking to contribute, this is a great place dehub commit --anon-pgp-key=KEY_NAME comment --amend
to start. ```
[dehub-remote](/cmd/dehub-remote/) is a simple docker image which can be used to Finally, to push your comment commit up, you can do:
host a remote dehub project over http(s). The endpoint will automatically verify
all pushed commits. ```
git push origin public/welcome
```
Once pushed, everyone will be able to see your comment!
### What to say?
Here's some starting points if you're not sure what to write in your first
comment:
* Introduce yourself; say where you're from and what your interests are.
* How did you find dehub? Why is it interesting to you?
* If you're using dehub for a project, shill your project!
* If you'd like to get involved in dehub's development, let us know what your
skills are and how you can help. Remember, it takes more than expert
programmers to make a project successful.
## Rules
Please be kind to others, and keep discussion related to dehub and
dehub-adjacent topics. Politics, in general, is not going to be related to
dehub. Comments which are off-topic or otherwise abusive are subject to being
removed.
[git-http-server](/cmd/git-http-server/) is a small server which makes a git
repo's file tree available via http. It will automatically render markdown files
to html as well. git-http-server is used to render dehub's website.

View File

@ -1,155 +0,0 @@
// Package accessctl implements functionality related to allowing or denying
// actions in a repo based on who is taking what actions.
package accessctl
import (
"errors"
"fmt"
"dehub.dev/src/dehub.git/sigcred"
yaml "gopkg.in/yaml.v2"
)
// DefaultAccessControlsStr is the encoded form of the default access control
// set which is applied to all CommitRequests if no user-supplied ones match.
//
// The effect of these AccessControls is to allow all commit types on any branch
// (with the exception of the main branch, which only allows change commits), as
// long as the commit has one signature from a configured account. In order:
//
//  1. allow any signed commit on a non-main branch;
//  2. deny non-fast-forward commits (on main, given rule 1);
//  3. allow signed change commits on main;
//  4. deny everything else (the filterless final rule matches any request,
//     so AssertCanCommit always reaches a definite action).
//
// NOTE(review): the YAML indentation below was reconstructed from a
// whitespace-stripped rendering — confirm against the repo history.
var DefaultAccessControlsStr = `
- action: allow
  filters:
  - type: not
    filter:
      type: branch
      pattern: main
  - type: signature
    any_account: true
    count: 1
- action: deny
  filters:
  - type: commit_attributes
    non_fast_forward: true
- action: allow
  filters:
  - type: branch
    pattern: main
  - type: payload_type
    payload_type: change
  - type: signature
    any_account: true
    count: 1
- action: deny
`
// DefaultAccessControls is the decoded form of DefaultAccessControlsStr.
//
// Decoding happens once at package init; a malformed default string is a
// programmer bug and panics immediately.
var DefaultAccessControls = func() []AccessControl {
	var controls []AccessControl
	err := yaml.Unmarshal([]byte(DefaultAccessControlsStr), &controls)
	if err != nil {
		panic(err)
	}
	return controls
}()
// CommitRequest is used to describe a set of interactions which are being
// requested to be performed. It is the input against which AccessControls and
// their Filters are evaluated.
type CommitRequest struct {
	// Type describes what type of commit is being requested. Possibilities are
	// determined by the requester (e.g. "change" or "comment").
	Type string

	// Branch is the name of the branch the interactions are being attempted on.
	// It is required.
	Branch string

	// Credentials are the credentials attached to the commit.
	Credentials []sigcred.CredentialUnion

	// FilesChanged is the set of file paths (relative to the repo root) which
	// have been modified in some way.
	FilesChanged []string

	// NonFastForward should be set to true if the branch HEAD and this commit
	// are not directly related (i.e. neither is a direct ancestor of the
	// other).
	NonFastForward bool
}
// Action describes what action an AccessControl should perform
// when given a CommitRequest.
type Action string

// Enumerates possible Action values.
const (
	// ActionAllow permits the request; evaluation stops.
	ActionAllow Action = "allow"

	// ActionDeny rejects the request; evaluation stops.
	ActionDeny Action = "deny"

	// ActionNext is used internally when a request does not match an
	// AccessControl's filters. It _could_ be used in the Config as well, but it
	// would be pretty pointless to do so, so we don't talk about it.
	ActionNext Action = "next"
)
// AccessControl describes a set of Filters, and the Action which should be
// taken on a CommitRequest if those Filters all match on the CommitRequest.
type AccessControl struct {
	// Action to take when every Filter matches (e.g. ActionAllow, ActionDeny).
	Action Action `yaml:"action"`

	// Filters which must all match for Action to apply. An empty list matches
	// every request (see ActionForCommit).
	Filters []FilterUnion `yaml:"filters"`
}
// ActionForCommit returns the Action this AccessControl prescribes for the
// given CommitRequest. It returns ActionNext when the request is not matched
// by one of the AccessControl's Filters.
func (ac AccessControl) ActionForCommit(req CommitRequest) (Action, error) {
	for _, fu := range ac.Filters {
		err := fu.Filter().MatchCommit(req)
		if err == nil {
			continue
		}
		if errors.As(err, new(ErrFilterNoMatch)) {
			// A non-matching filter means this AccessControl abstains.
			return ActionNext, nil
		}
		return "", fmt.Errorf("matching commit using filter of type %q: %w", fu.Type(), err)
	}
	// Every filter matched (vacuously so for an empty list), so the
	// configured Action applies.
	return ac.Action, nil
}
// ErrCommitRequestDenied is returned from AssertCanCommit when a particular
// AccessControl has explicitly disallowed the CommitRequest.
type ErrCommitRequestDenied struct {
	// By is the AccessControl which denied the request; it is rendered into
	// the error message so the user can see which rule applied.
	By AccessControl
}

func (e ErrCommitRequestDenied) Error() string {
	// An encoding failure here is unexpected for a decoded AccessControl;
	// treat it as a programmer bug.
	acB, err := yaml.Marshal(e.By)
	if err != nil {
		panic(err)
	}
	return fmt.Sprintf("commit matched and denied by this access control:\n%s", string(acB))
}
// AssertCanCommit asserts that the given CommitRequest is allowed by the given
// AccessControls.
func AssertCanCommit(acl []AccessControl, req CommitRequest) error {
acl = append(acl, DefaultAccessControls...)
for _, ac := range acl {
action, err := ac.ActionForCommit(req)
if err != nil {
return err
}
switch action {
case ActionNext:
continue
case ActionAllow:
return nil
case ActionDeny:
return ErrCommitRequestDenied{By: ac}
default:
return fmt.Errorf("invalid action %q", action)
}
}
panic("should not be able to get here")
}

View File

@ -1,145 +0,0 @@
package accessctl
import (
"errors"
"testing"
"dehub.dev/src/dehub.git/sigcred"
)
// TestAssertCanCommit checks ACL precedence in AssertCanCommit: the first
// AccessControl whose filters match the request decides the outcome, and
// DefaultAccessControls decide when no user-supplied control matches.
func TestAssertCanCommit(t *testing.T) {
	tests := []struct {
		descr   string
		acl     []AccessControl
		req     CommitRequest
		allowed bool
	}{
		{
			descr: "first allows",
			acl: []AccessControl{
				{
					Action: ActionAllow,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
			},
			req:     CommitRequest{Type: "foo"},
			allowed: true,
		},
		{
			descr: "first denies",
			acl: []AccessControl{
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
				{
					Action: ActionAllow,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
			},
			req:     CommitRequest{Type: "foo"},
			allowed: false,
		},
		{
			descr: "second allows",
			acl: []AccessControl{
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "bar"},
					}},
				},
				{
					Action: ActionAllow,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
			},
			req:     CommitRequest{Type: "foo"},
			allowed: true,
		},
		{
			descr: "second denies",
			acl: []AccessControl{
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "bar"},
					}},
				},
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "foo"},
					}},
				},
			},
			req:     CommitRequest{Type: "foo"},
			allowed: false,
		},
		{
			// Not matched by the given ACL; the defaults allow a signed
			// commit on a non-main branch.
			descr: "default allows",
			acl: []AccessControl{
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "bar"},
					}},
				},
			},
			req: CommitRequest{
				Branch: "not_main",
				Type:   "foo",
				Credentials: []sigcred.CredentialUnion{{
					PGPSignature: new(sigcred.CredentialPGPSignature),
					AccountID:    "a",
				}},
			},
			allowed: true,
		},
		{
			// The defaults only allow "change" commits on main, so this
			// "foo" commit falls through to the final deny.
			descr: "default denies",
			acl: []AccessControl{
				{
					Action: ActionDeny,
					Filters: []FilterUnion{{
						PayloadType: &FilterPayloadType{Type: "bar"},
					}},
				},
			},
			req: CommitRequest{
				Branch: "main",
				Type:   "foo",
				Credentials: []sigcred.CredentialUnion{{
					PGPSignature: new(sigcred.CredentialPGPSignature),
					AccountID:    "a",
				}},
			},
			allowed: false,
		},
	}

	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			err := AssertCanCommit(test.acl, test.req)
			if test.allowed && err != nil {
				t.Fatalf("expected to be allowed but got: %v", err)
			} else if !test.allowed && !errors.As(err, new(ErrCommitRequestDenied)) {
				t.Fatalf("expected to be denied but got: %v", err)
			}
		})
	}
}

View File

@ -1,124 +0,0 @@
package accessctl
import (
"errors"
"fmt"
"dehub.dev/src/dehub.git/typeobj"
)
// ErrFilterNoMatch is returned from a FilterInterface's Match method when the
// given request was not matched to the filter due to the request itself (as
// opposed to some error in the filter's definition).
type ErrFilterNoMatch struct {
Err error
}
func (err ErrFilterNoMatch) Error() string {
return fmt.Sprintf("matching with filter: %s", err.Err.Error())
}
// Filter describes the methods that all Filters must implement.
type Filter interface {
	// MatchCommit returns nil if the CommitRequest is matched by the filter,
	// otherwise it returns an error (ErrFilterNoMatch if the error is due to
	// the CommitRequest itself rather than a misconfigured filter).
	MatchCommit(CommitRequest) error
}
// FilterUnion represents an access control filter being defined in the Config.
// Only one of its fields may be filled at a time.
//
// The `type` struct tags are consumed by the typeobj package to map each
// field to its YAML "type" value.
type FilterUnion struct {
	Signature        *FilterSignature        `type:"signature"`
	Branch           *FilterBranch           `type:"branch"`
	FilesChanged     *FilterFilesChanged     `type:"files_changed"`
	PayloadType      *FilterPayloadType      `type:"payload_type"`
	CommitAttributes *FilterCommitAttributes `type:"commit_attributes"`
	Not              *FilterNot              `type:"not"`
}
// MarshalYAML implements the yaml.Marshaler interface by delegating to the
// typeobj package.
func (f FilterUnion) MarshalYAML() (interface{}, error) {
	return typeobj.MarshalYAML(f)
}

// UnmarshalYAML implements the yaml.Unmarshaler interface by delegating to
// the typeobj package.
func (f *FilterUnion) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return typeobj.UnmarshalYAML(f, unmarshal)
}
// Filter returns the Filter encapsulated by this FilterUnion.
//
// This method will panic if no Filter field is populated (i.e. a zero
// FilterUnion), since that indicates a malformed or unvalidated Config.
func (f FilterUnion) Filter() Filter {
	el, _, err := typeobj.Element(f)
	if err != nil {
		panic(err)
	}
	return el.(Filter)
}

// Type returns the Filter's type (as would be used in its YAML "type" field).
//
// This will panic if no Filter field is populated, for the same reason as
// Filter.
func (f FilterUnion) Type() string {
	_, typeStr, err := typeobj.Element(f)
	if err != nil {
		panic(err)
	}
	return typeStr
}
// FilterPayloadType filters by what type of payload is being requested. Exactly
// one of its fields should be filled.
type FilterPayloadType struct {
	Type  string   `yaml:"payload_type"`
	Types []string `yaml:"payload_types"`
}

var _ Filter = FilterPayloadType{}

// MatchCommit implements the method for FilterInterface. It matches when the
// request's payload type equals Type, or is contained in Types.
func (f FilterPayloadType) MatchCommit(req CommitRequest) error {
	if f.Type != "" {
		if req.Type == f.Type {
			return nil
		}
		return ErrFilterNoMatch{
			Err: fmt.Errorf("payload type %q does not match filter's type %q",
				req.Type, f.Type),
		}
	}

	if len(f.Types) == 0 {
		// Neither field was filled in; the filter itself is misconfigured.
		return errors.New(`one of the following fields must be set: "payload_type", "payload_types"`)
	}

	for _, typ := range f.Types {
		if typ == req.Type {
			return nil
		}
	}
	return ErrFilterNoMatch{
		Err: fmt.Errorf("payload type %q does not match any of filter's types %+v",
			req.Type, f.Types),
	}
}
// FilterCommitAttributes filters by one more attributes a commit can have. If
// more than one field is filled in then all relevant attributes must be present
// on the commit for this filter to match.
type FilterCommitAttributes struct {
	NonFastForward bool `yaml:"non_fast_forward"`
}

var _ Filter = FilterCommitAttributes{}

// MatchCommit implements the method for FilterInterface.
func (f FilterCommitAttributes) MatchCommit(req CommitRequest) error {
	// Only reject when the filter demands a non-fast-forward and the commit
	// is a fast-forward; everything else matches.
	if !f.NonFastForward || req.NonFastForward {
		return nil
	}
	return ErrFilterNoMatch{Err: errors.New("commit is a fast-forward")}
}

View File

@ -1,26 +0,0 @@
package accessctl
import (
"errors"
)
// FilterNot wraps another Filter. If that filter matches, FilterNot does not
// match, and vice-versa.
type FilterNot struct {
	Filter FilterUnion `yaml:"filter"`
}

var _ Filter = FilterNot{}

// MatchCommit implements the method for FilterInterface.
func (f FilterNot) MatchCommit(req CommitRequest) error {
	err := f.Filter.Filter().MatchCommit(req)
	switch {
	case errors.As(err, new(ErrFilterNoMatch)):
		// The inner filter rejected the request, so the negation matches.
		return nil
	case err != nil:
		// Real (non-match) errors are propagated as-is.
		return err
	default:
		return ErrFilterNoMatch{Err: errors.New("sub-filter did match")}
	}
}

// TODO FilterAll
// TODO FilterAny

View File

@ -1,32 +0,0 @@
package accessctl
import "testing"
// TestFilterNot checks that FilterNot inverts its wrapped filter's result.
func TestFilterNot(t *testing.T) {
	runCommitMatchTests(t, []filterCommitMatchTest{
		{
			descr: "sub-filter does match",
			filter: FilterNot{
				Filter: FilterUnion{
					PayloadType: &FilterPayloadType{Type: "foo"},
				},
			},
			req: CommitRequest{
				Type: "foo",
			},
			match: false,
		},
		{
			descr: "sub-filter does not match",
			filter: FilterNot{
				Filter: FilterUnion{
					PayloadType: &FilterPayloadType{Type: "foo"},
				},
			},
			req: CommitRequest{
				Type: "bar",
			},
			match: true,
		},
	})
}

View File

@ -1,96 +0,0 @@
package accessctl
import (
"errors"
"fmt"
"github.com/bmatcuk/doublestar"
)
// StringMatcher is used to match against a string. It can use one of several
// methods to match. Only one field should be filled at a time.
type StringMatcher struct {
	// Pattern, if set, indicates that the Match method should succeed if this
	// doublestar pattern matches against the string.
	Pattern string `yaml:"pattern,omitempty"`

	// Patterns, if set, indicates that the Match method should succeed if at
	// least one of these doublestar patterns matches against the string.
	Patterns []string `yaml:"patterns,omitempty"`
}
// doublestarMatch matches str against the given doublestar pattern, wrapping
// any pattern-syntax error with context about what was being matched.
func doublestarMatch(pattern, str string) (bool, error) {
	matched, err := doublestar.Match(pattern, str)
	if err != nil {
		return false, fmt.Errorf("matching %q on pattern %q: %w",
			str, pattern, err)
	}
	return matched, nil
}
// Match operates similarly to the Match method of the FilterInterface, except
// it only takes in strings. It returns ErrFilterNoMatch when str matches
// neither Pattern nor any entry of Patterns.
func (m StringMatcher) Match(str string) error {
	if m.Pattern != "" {
		ok, err := doublestarMatch(m.Pattern, str)
		if err != nil {
			return err
		}
		if !ok {
			return ErrFilterNoMatch{
				Err: fmt.Errorf("pattern %q does not match %q", m.Pattern, str),
			}
		}
		return nil
	}

	if len(m.Patterns) == 0 {
		// Neither field was filled in; the matcher is misconfigured.
		return errors.New(`one of the following fields must be set: "pattern", "patterns"`)
	}

	// First matching pattern wins.
	for _, pattern := range m.Patterns {
		ok, err := doublestarMatch(pattern, str)
		if err != nil {
			return err
		}
		if ok {
			return nil
		}
	}
	return ErrFilterNoMatch{
		Err: fmt.Errorf("no patterns in %+v match %q", m.Patterns, str),
	}
}
// FilterBranch matches a CommitRequest's Branch field using a double-star
// pattern.
type FilterBranch struct {
	// StringMatcher is inlined so "pattern"/"patterns" appear directly on the
	// filter's YAML object.
	StringMatcher StringMatcher `yaml:",inline"`
}

var _ Filter = FilterBranch{}

// MatchCommit implements the method for FilterInterface.
func (f FilterBranch) MatchCommit(req CommitRequest) error {
	return f.StringMatcher.Match(req.Branch)
}
// FilterFilesChanged matches a CommitRequest's FilesChanged field using a
// double-star pattern. It matches if at least one of the CommitRequest's
// FilesChanged matches.
//
// NOTE(review): a previous version of this comment said "only ... if all of
// the CommitRequest's FilesChanged match", which contradicts both the code
// below (it returns nil on the first matching path) and the package's tests.
type FilterFilesChanged struct {
	// StringMatcher is inlined so "pattern"/"patterns" appear directly on the
	// filter's YAML object.
	StringMatcher StringMatcher `yaml:",inline"`
}

var _ Filter = FilterFilesChanged{}

// MatchCommit implements the method for FilterInterface.
func (f FilterFilesChanged) MatchCommit(req CommitRequest) error {
	for _, path := range req.FilesChanged {
		if err := f.StringMatcher.Match(path); errors.As(err, new(ErrFilterNoMatch)) {
			continue
		} else if err != nil {
			return err
		}
		// A single matching path is sufficient.
		return nil
	}
	return ErrFilterNoMatch{Err: errors.New("no paths matched")}
}

View File

@ -1,199 +0,0 @@
package accessctl
import (
"errors"
"testing"
)
// TestStringMatcher covers both matching modes of StringMatcher: a single
// Pattern, and a Patterns list (which succeeds on any matching entry).
func TestStringMatcher(t *testing.T) {
	tests := []struct {
		descr   string
		matcher StringMatcher
		str     string
		match   bool
	}{
		// Pattern
		{
			descr: "pattern exact match",
			matcher: StringMatcher{
				Pattern: "foo",
			},
			str:   "foo",
			match: true,
		},
		{
			descr: "pattern exact no match",
			matcher: StringMatcher{
				Pattern: "foo",
			},
			str:   "bar",
			match: false,
		},
		{
			descr: "pattern single star match",
			matcher: StringMatcher{
				Pattern: "foo/*",
			},
			str:   "foo/bar",
			match: true,
		},
		{
			descr: "pattern single star no match 1",
			matcher: StringMatcher{
				Pattern: "foo/*",
			},
			str:   "foo",
			match: false,
		},
		{
			descr: "pattern single star no match 2",
			matcher: StringMatcher{
				Pattern: "foo/*",
			},
			str:   "foo/bar/baz",
			match: false,
		},
		{
			descr: "pattern double star match 1",
			matcher: StringMatcher{
				Pattern: "foo/**",
			},
			str:   "foo/bar",
			match: true,
		},
		{
			descr: "pattern double star match 2",
			matcher: StringMatcher{
				Pattern: "foo/**",
			},
			str:   "foo/bar/baz",
			match: true,
		},
		{
			descr: "pattern double star no match",
			matcher: StringMatcher{
				Pattern: "foo/**",
			},
			str:   "foo",
			match: false,
		},

		// Patterns, assumes individual pattern matching works correctly
		{
			descr: "patterns single match",
			matcher: StringMatcher{
				Patterns: []string{"foo"},
			},
			str:   "foo",
			match: true,
		},
		{
			descr: "patterns single no match",
			matcher: StringMatcher{
				Patterns: []string{"foo"},
			},
			str:   "bar",
			match: false,
		},
		{
			descr: "patterns multi first match",
			matcher: StringMatcher{
				Patterns: []string{"foo", "bar"},
			},
			str:   "foo",
			match: true,
		},
		{
			descr: "patterns multi second match",
			matcher: StringMatcher{
				Patterns: []string{"foo", "bar"},
			},
			str:   "bar",
			match: true,
		},
		{
			descr: "patterns multi no match",
			matcher: StringMatcher{
				Patterns: []string{"foo", "bar"},
			},
			str:   "baz",
			match: false,
		},
	}

	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			err := test.matcher.Match(test.str)
			if test.match && err != nil {
				t.Fatalf("expected to match, got %v", err)
			} else if !test.match && !errors.As(err, new(ErrFilterNoMatch)) {
				t.Fatalf("expected ErrFilterNoMatch, got %#v", err)
			}
		})
	}
}
// TestFilterFilesChanged checks that FilterFilesChanged matches when at least
// one changed path matches the pattern(s), and does not match otherwise.
func TestFilterFilesChanged(t *testing.T) {
	mkReq := func(paths ...string) CommitRequest {
		return CommitRequest{FilesChanged: paths}
	}

	runCommitMatchTests(t, []filterCommitMatchTest{
		{
			descr: "no paths",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Pattern: "foo"},
			},
			req:   mkReq(),
			match: false,
		},
		{
			descr: "all paths against one pattern",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Pattern: "foo/*"},
			},
			req:   mkReq("foo/bar", "foo/baz"),
			match: true,
		},
		{
			descr: "all paths against multiple patterns",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Patterns: []string{"foo", "bar"}},
			},
			req:   mkReq("foo", "bar"),
			match: true,
		},
		{
			descr: "some paths against one pattern",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Pattern: "foo"},
			},
			req:   mkReq("foo", "bar"),
			match: true,
		},
		{
			descr: "some paths against many patterns",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Patterns: []string{"foo", "bar"}},
			},
			req:   mkReq("foo", "baz"),
			match: true,
		},
		{
			descr: "no paths against one pattern",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Pattern: "foo"},
			},
			req:   mkReq("baz", "buz"),
			match: false,
		},
		{
			descr: "no paths against many patterns",
			filter: FilterFilesChanged{
				StringMatcher: StringMatcher{Patterns: []string{"foo", "bar"}},
			},
			req:   mkReq("baz", "buz"),
			match: false,
		},
	})
}

View File

@ -1,113 +0,0 @@
package accessctl
import (
"errors"
"fmt"
"math"
"strconv"
"strings"
)
// FilterSignature represents the configuration of a Filter which requires one
// or more signature credentials to be present on a commit.
//
// Either AccountIDs, AnyAccount, or Any must be filled in; all are mutually
// exclusive.
type FilterSignature struct {
	// AccountIDs lists the specific accounts whose signatures count toward
	// Count.
	AccountIDs []string `yaml:"account_ids,omitempty"`

	// Any matches as long as at least one signature credential is present,
	// regardless of which account (if any) it belongs to.
	Any bool `yaml:"any,omitempty"`

	// AnyAccount counts signatures from any account toward Count.
	AnyAccount bool `yaml:"any_account,omitempty"`

	// Count is the number of account signatures required, either an absolute
	// number (e.g. "2") or, with AccountIDs, a percentage of those accounts
	// (e.g. "50%"). An empty Count means 1 (see targetNum).
	Count string `yaml:"count,omitempty"`
}
var _ Filter = FilterSignature{}

// targetNum computes how many account signatures this filter requires. Count
// may be an absolute number ("2") or, when AccountIDs is used, a percentage
// of those accounts ("50%", rounded up). An empty Count means 1.
func (f FilterSignature) targetNum() (int, error) {
	switch {
	case f.Count == "":
		return 1, nil
	case !strings.HasSuffix(f.Count, "%"):
		return strconv.Atoi(f.Count)
	case f.AnyAccount:
		// A percentage is only meaningful relative to AccountIDs.
		return 0, errors.New("cannot use AnyAccount and a percent Count together")
	}

	percent, err := strconv.ParseFloat(strings.TrimRight(f.Count, "%"), 64)
	if err != nil {
		return 0, fmt.Errorf("could not parse Count as percent %q: %w", f.Count, err)
	}
	// Round up so e.g. 50% of 3 accounts requires 2 signatures.
	return int(math.Ceil(float64(len(f.AccountIDs)) * percent / 100)), nil
}
// ErrFilterSignatureUnsatisfied is returned from FilterSignature's
// Match method when the filter has not been satisfied.
type ErrFilterSignatureUnsatisfied struct {
	// TargetNumAccounts is how many account signatures were required;
	// NumAccounts is how many were actually counted.
	TargetNumAccounts, NumAccounts int
}

func (err ErrFilterSignatureUnsatisfied) Error() string {
	const format = "not enough valid signature credentials, filter requires %d but only had %d"
	return fmt.Sprintf(format, err.TargetNumAccounts, err.NumAccounts)
}
// MatchCommit returns nil if the CommitRequest contains a sufficient number of
// signature Credentials, and an ErrFilterNoMatch wrapping
// ErrFilterSignatureUnsatisfied otherwise.
func (f FilterSignature) MatchCommit(req CommitRequest) error {
	targetN, err := f.targetNum()
	if err != nil {
		return fmt.Errorf("computing target number of accounts: %w", err)
	}

	// Tally the signature credentials, remembering which accounts they
	// belong to.
	numSigs := 0
	credAccountIDs := map[string]struct{}{}
	for _, cred := range req.Credentials {
		// TODO support other kinds of signatures
		if cred.PGPSignature == nil {
			continue
		}
		numSigs++
		if cred.AccountID != "" {
			credAccountIDs[cred.AccountID] = struct{}{}
		}
	}

	// With no signatures at all, nothing can possibly satisfy the filter.
	if numSigs == 0 {
		return ErrFilterNoMatch{
			Err: ErrFilterSignatureUnsatisfied{TargetNumAccounts: targetN},
		}
	}

	if f.Any {
		return nil
	}

	var n int
	if f.AnyAccount {
		// TODO this doesn't actually check that the accounts are defined in the
		// Config. It works for now as long as the Credentials are valid, since
		// only an Account defined in the Config could create a valid
		// Credential, but once that's not the case this will need to be
		// revisited.
		n = len(credAccountIDs)
	} else {
		// Count how many of the filter's (deduplicated) AccountIDs actually
		// signed.
		seen := map[string]struct{}{}
		for _, accountID := range f.AccountIDs {
			if _, dup := seen[accountID]; dup {
				continue
			}
			seen[accountID] = struct{}{}
			if _, ok := credAccountIDs[accountID]; ok {
				n++
			}
		}
	}

	if n >= targetN {
		return nil
	}
	return ErrFilterNoMatch{
		Err: ErrFilterSignatureUnsatisfied{
			NumAccounts:       n,
			TargetNumAccounts: targetN,
		},
	}
}

View File

@ -1,124 +0,0 @@
package accessctl
import (
"testing"
"dehub.dev/src/dehub.git/sigcred"
)
// TestFilterSignature exercises FilterSignature.MatchCommit across its three
// modes: Any (any signature at all), AnyAccount (any account-bound
// signatures), and an explicit AccountIDs list, with both absolute and
// percentage Counts.
func TestFilterSignature(t *testing.T) {
	// mkReq constructs a CommitRequest carrying one PGP signature credential
	// per given account ID.
	mkReq := func(accountIDs ...string) CommitRequest {
		creds := make([]sigcred.CredentialUnion, len(accountIDs))
		for i := range accountIDs {
			creds[i].PGPSignature = new(sigcred.CredentialPGPSignature)
			creds[i].AccountID = accountIDs[i]
		}
		return CommitRequest{Credentials: creds}
	}

	runCommitMatchTests(t, []filterCommitMatchTest{
		{
			descr: "no cred accounts",
			filter: FilterSignature{
				AnyAccount: true,
				Count:      "1",
			},
			matchErr: ErrFilterSignatureUnsatisfied{
				TargetNumAccounts: 1,
				NumAccounts:       0,
			},
		},
		{
			descr: "one cred account",
			filter: FilterSignature{
				AnyAccount: true,
				Count:      "1",
			},
			req:   mkReq("foo"),
			match: true,
		},
		{
			descr: "one matching cred account",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar"},
				Count:      "1",
			},
			req:   mkReq("foo"),
			match: true,
		},
		{
			// a credential from an account outside AccountIDs doesn't count.
			descr: "no matching cred account",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar"},
				Count:      "1",
			},
			req: mkReq("baz"),
			matchErr: ErrFilterSignatureUnsatisfied{
				TargetNumAccounts: 1,
				NumAccounts:       0,
			},
		},
		{
			descr: "two matching cred accounts",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar"},
				Count:      "2",
			},
			req:   mkReq("foo", "bar"),
			match: true,
		},
		{
			descr: "one matching cred account, missing one",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar"},
				Count:      "2",
			},
			req: mkReq("foo", "baz"),
			matchErr: ErrFilterSignatureUnsatisfied{
				TargetNumAccounts: 2,
				NumAccounts:       1,
			},
		},
		{
			// 50% of 3 accounts rounds up to a target of 2.
			descr: "50 percent matching cred accounts",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar", "baz"},
				Count:      "50%",
			},
			req:   mkReq("foo", "bar"),
			match: true,
		},
		{
			descr: "not 50 percent matching cred accounts",
			filter: FilterSignature{
				AccountIDs: []string{"foo", "bar", "baz"},
				Count:      "50%",
			},
			req: mkReq("foo"),
			matchErr: ErrFilterSignatureUnsatisfied{
				TargetNumAccounts: 2,
				NumAccounts:       1,
			},
		},
		{
			// Any accepts a signature with no account attached.
			descr: "any sig at all",
			filter: FilterSignature{
				Any: true,
			},
			req: CommitRequest{
				Credentials: []sigcred.CredentialUnion{
					{PGPSignature: new(sigcred.CredentialPGPSignature)},
				},
			},
			match: true,
		},
		{
			descr:  "not any sig at all",
			filter: FilterSignature{Any: true},
			req:    CommitRequest{},
			matchErr: ErrFilterSignatureUnsatisfied{
				TargetNumAccounts: 1,
			},
		},
	})
}

View File

@ -1,137 +0,0 @@
package accessctl
import (
"errors"
"reflect"
"testing"
)
// filterCommitMatchTest describes a single table-driven test case for a
// Filter's MatchCommit method.
type filterCommitMatchTest struct {
	descr  string        // human-readable name for the sub-test
	filter Filter        // the filter under test
	req    CommitRequest // the request handed to MatchCommit
	match  bool          // if true, MatchCommit is expected to return nil

	// assumes match == false, and will ensure that the returned wrapped error
	// is this one.
	matchErr error
}
// runCommitMatchTests runs every case in the table against its filter's
// MatchCommit method, failing the sub-test when the outcome doesn't line up
// with the case's expectations.
func runCommitMatchTests(t *testing.T, tests []filterCommitMatchTest) {
	for _, tc := range tests {
		tc := tc
		t.Run(tc.descr, func(t *testing.T) {
			err := tc.filter.MatchCommit(tc.req)
			if tc.match && tc.matchErr == nil {
				// The case expects a clean match.
				if err != nil {
					t.Fatalf("expected to match, got %v", err)
				}
				return
			}
			var fErr ErrFilterNoMatch
			if !errors.As(err, &fErr) {
				t.Fatalf("expected ErrFilterNoMatch, got: %#v", err)
			}
			if tc.matchErr != nil && !reflect.DeepEqual(fErr.Err, tc.matchErr) {
				t.Fatalf("expected err %#v, not %#v", tc.matchErr, fErr.Err)
			}
		})
	}
}
// TestFilterPayloadType exercises FilterPayloadType.MatchCommit with both the
// single-valued Type field and the multi-valued Types field.
func TestFilterPayloadType(t *testing.T) {
	// mkReq constructs a CommitRequest whose payload has the given type.
	mkReq := func(commitType string) CommitRequest {
		return CommitRequest{Type: commitType}
	}

	runCommitMatchTests(t, []filterCommitMatchTest{
		{
			descr: "single match",
			filter: FilterPayloadType{
				Type: "foo",
			},
			req:   mkReq("foo"),
			match: true,
		},
		{
			descr: "single no match",
			filter: FilterPayloadType{
				Type: "foo",
			},
			req:   mkReq("bar"),
			match: false,
		},
		{
			descr: "multi match first",
			filter: FilterPayloadType{
				Types: []string{"foo", "bar"},
			},
			req:   mkReq("foo"),
			match: true,
		},
		{
			descr: "multi match second",
			filter: FilterPayloadType{
				Types: []string{"foo", "bar"},
			},
			req:   mkReq("bar"),
			match: true,
		},
		{
			descr: "multi no match",
			filter: FilterPayloadType{
				Types: []string{"foo", "bar"},
			},
			req:   mkReq("baz"),
			match: false,
		},
	})
}
// TestFilterCommitAttributes exercises FilterCommitAttributes.MatchCommit
// against fast-forward and non-fast-forward requests, including the filter
// wrapped in a FilterNot to invert its result.
func TestFilterCommitAttributes(t *testing.T) {
	// mkReq constructs a CommitRequest with the given non-fast-forward flag.
	mkReq := func(nonFF bool) CommitRequest {
		return CommitRequest{NonFastForward: nonFF}
	}

	runCommitMatchTests(t, []filterCommitMatchTest{
		{
			descr:  "ff with empty filter",
			filter: FilterCommitAttributes{},
			req:    mkReq(false),
			match:  true,
		},
		{
			descr:  "non-ff with empty filter",
			filter: FilterCommitAttributes{},
			req:    mkReq(true),
			match:  true,
		},
		{
			descr:  "ff with non-ff filter",
			filter: FilterCommitAttributes{NonFastForward: true},
			req:    mkReq(false),
			match:  false,
		},
		{
			descr:  "non-ff with non-ff filter",
			filter: FilterCommitAttributes{NonFastForward: true},
			req:    mkReq(true),
			match:  true,
		},
		{
			descr: "ff with inverted non-ff filter",
			filter: FilterNot{Filter: FilterUnion{
				CommitAttributes: &FilterCommitAttributes{NonFastForward: true},
			}},
			req:   mkReq(false),
			match: true,
		},
		{
			descr: "non-ff with inverted non-ff filter",
			filter: FilterNot{Filter: FilterUnion{
				CommitAttributes: &FilterCommitAttributes{NonFastForward: true},
			}},
			req:   mkReq(true),
			match: false,
		},
	})
}

View File

@ -1,39 +0,0 @@
# dehub-remote
This directory provides a simple Docker image which can be spun up to run a
dehub-enabled git http remote server. Commits which are pushed to this server
will be automatically verified using `dehub verify`.
The docker image is also being hosted on docker hub at
[mediocregopher/dehub-remote][dehub-remote]. Proper image tagging/versioning
coming soon!
[dehub-remote]: https://hub.docker.com/repository/docker/mediocregopher/dehub-remote
## Usage
Running the following:
```
docker run \
--name dehub \
-v /opt/dehub/repos:/repos \
-p 8080:80 \
mediocregopher/dehub-remote repo-a.git repo-b.git
```
Will start an http server on port 8080, using `/opt/dehub/repos` to store all
repo folders. It will then initialize repo directories at
`/opt/dehub/repos/repo-a.git` and `/opt/dehub/repos/repo-b.git`, if they aren't
already there.
## Extras
For convenience the docker image also includes the
[git-http-server](../git-http-server/) binary.
## Contributors
The Dockerfile being used is based on
[gitbox](https://github.com/nmarus/docker-gitbox), so thank you to nmarus for
the great work there.

View File

@ -1,44 +0,0 @@
# nginx config for the dehub-remote image. Serves git's smart-HTTP protocol
# by handing every request to git-http-backend through fcgiwrap. The MYSERVER
# placeholder is replaced with $FQDN by the container's start script.
user git git;
worker_processes 1;
pid /run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 15;
    types_hash_max_size 2048;
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    access_log /var/log/nginx/access.log;
    error_log /var/log/nginx/error.log;
    server_names_hash_bucket_size 64;

    server {
        listen 80;
        server_name MYSERVER default;
        root /var/www;
        access_log /var/log/nginx/MYSERVER.access.log combined;
        error_log /var/log/nginx/MYSERVER.error.log error;

        #git SMART HTTP
        location / {
            # no request body size limit, since git pushes can be large
            client_max_body_size 0;
            fastcgi_param SCRIPT_FILENAME /usr/lib/git-core/git-http-backend;
            fastcgi_param GIT_HTTP_EXPORT_ALL "";
            fastcgi_param GIT_PROJECT_ROOT /repos;
            fastcgi_param PATH_INFO $uri;
            include /etc/nginx/fastcgi_params;
            fastcgi_pass unix:/var/run/fcgiwrap.socket;
        }
    }
}

View File

@ -1,86 +0,0 @@
#!/bin/bash
# Entrypoint for the dehub-remote docker image: initializes any repos named
# on the command line, starts nginx + fcgiwrap to serve them over git's
# smart-HTTP protocol, then tails the nginx logs to keep the container alive.
set -e

QUIET=false
#SFLOG="/start.log"

#print timestamp
timestamp() {
    date +"%Y-%m-%d %T"
}

#screen/file logger: echoes $1 to the screen (unless QUIET) and appends it,
#timestamped, to $SFLOG when that variable is set.
sflog() {
    #if $1 is not null
    if [ ! -z ${1+x} ]; then
        message=$1
    else
        #exit function
        return 1;
    fi
    #if $QUIET is not true
    # NOTE(review): `$($QUIET)` works because bash uses the exit status of the
    # command substitution when the expanded command line is empty — confirm
    # before simplifying to `! $QUIET`.
    if ! $($QUIET); then
        echo "${message}"
    fi
    #if $SFLOG is not null
    if [ ! -z ${SFLOG+x} ]; then
        #if $SFLOG is regular file or does not exist
        if [ -f ${SFLOG} ] || [ ! -e ${SFLOG} ]; then
            echo "$(timestamp) ${message}" >> ${SFLOG}
        fi
    fi
}

#start services function
startc() {
    sflog "Services for container are being started..."
    /etc/init.d/fcgiwrap start > /dev/null
    /etc/init.d/nginx start > /dev/null
    sflog "The container services have started..."
}

#stop services function (nginx first, then fcgiwrap — reverse of startc)
stopc() {
    sflog "Services for container are being stopped..."
    /etc/init.d/nginx stop > /dev/null
    /etc/init.d/fcgiwrap stop > /dev/null
    sflog "Services for container have successfully stopped. Exiting."
    exit 0
}

#trap "docker stop <container>" and shuts services down cleanly
trap "(stopc)" TERM INT

#startup
#test for ENV variable $FQDN, defaulting it when unset
if [ ! -z ${FQDN+x} ]; then
    sflog "FQDN is set to ${FQDN}"
else
    export FQDN=dehub
    sflog "FQDN is set to ${FQDN}"
fi

#modify config files with fqdn (replaces the MYSERVER placeholder)
sed -i "s,MYSERVER,${FQDN},g" /etc/nginx/nginx.conf &> /dev/null

# create the individual repo directories; each positional arg names one
# bare repo to initialize under /repos, owned by the git user
while [ ! -z "$1" ]; do
    dir="/repos/$1";
    if [ ! -d "$dir" ]; then
        echo "Initializing repo $1"
        mkdir "$dir"
        dehub init -path "$dir" -bare -remote
        chown -R git:git "$dir"
    fi
    shift
done

#start init.d services
startc

#pause script to keep container running...
sflog "Services for container successfully started."
sflog "Dumping logs"
tail -f /var/log/nginx/*.log

View File

@ -1,273 +0,0 @@
package main
import (
"context"
"errors"
"fmt"
"dehub.dev/src/dehub.git"
"dehub.dev/src/dehub.git/cmd/dehub/dcmd"
"dehub.dev/src/dehub.git/sigcred"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// cmdCommit implements the "commit" sub-command and its "change",
// "credential", and "comment" sub-sub-commands. Each one constructs the
// appropriate payload, accredits it with the caller's credentials (either an
// account from the config via -as, or an anonymous PGP key via
// -anon-pgp-key), and commits it to the head of the current branch.
func cmdCommit(ctx context.Context, cmd *dcmd.Cmd) {
	flag := cmd.FlagSet()
	accountID := flag.String("as", "", "Account to accredit commit with")
	pgpKeyID := flag.String("anon-pgp-key", "", "ID of pgp key to sign with instead of using an account")

	var proj proj
	proj.initFlags(flag)

	// accreditAndCommit signs the given payload — using the -as account's
	// single signifier, or else the anonymous -anon-pgp-key — then commits
	// it to HEAD.
	accreditAndCommit := func(payUn dehub.PayloadUnion) error {
		var sig sigcred.Signifier
		if *accountID != "" {
			cfg, err := proj.LoadConfig()
			if err != nil {
				return err
			}
			// find the account in the config matching *accountID.
			var account dehub.Account
			var ok bool
			for _, account = range cfg.Accounts {
				if account.ID == *accountID {
					ok = true
					break
				}
			}
			if !ok {
				return fmt.Errorf("account ID %q not found in config", *accountID)
			} else if l := len(account.Signifiers); l == 0 || l > 1 {
				return fmt.Errorf("account %q has %d signifiers, only one is supported right now", *accountID, l)
			}
			sig = account.Signifiers[0].Signifier(*accountID)
		} else {
			var err error
			if sig, err = sigcred.LoadSignifierPGP(*pgpKeyID, true); err != nil {
				return fmt.Errorf("loading pgp key %q: %w", *pgpKeyID, err)
			}
		}

		payUn, err := proj.AccreditPayload(payUn, sig)
		if err != nil {
			return fmt.Errorf("accrediting payload: %w", err)
		}

		commit, err := proj.Commit(payUn)
		if err != nil {
			return fmt.Errorf("committing to git: %w", err)
		}
		fmt.Printf("committed to HEAD as %s\n", commit.Hash)
		return nil
	}

	// body runs before any sub-command is dispatched: it validates the
	// shared flags, opens the project, and records whether the git index has
	// staged changes (consulted by each sub-command below).
	var hasStaged bool
	body := func() (context.Context, error) {
		if *accountID == "" && *pgpKeyID == "" {
			return nil, errors.New("-as or -anon-pgp-key is required")
		}
		if err := proj.openProj(); err != nil {
			return nil, err
		}
		var err error
		if hasStaged, err = proj.HasStagedChanges(); err != nil {
			return nil, fmt.Errorf("determining if any changes have been staged: %w", err)
		}
		return ctx, nil
	}

	cmd.SubCmd("change", "Commit file changes",
		func(ctx context.Context, cmd *dcmd.Cmd) {
			flag := cmd.FlagSet()
			description := flag.String("descr", "", "Description of changes")
			amend := flag.Bool("amend", false, "Add changes to HEAD commit, amend its message, and re-accredit it")
			cmd.Run(func() (context.Context, error) {
				if !hasStaged && !*amend {
					return nil, errors.New("no changes have been staged for commit")
				}
				var prevMsg string
				if *amend {
					// roll HEAD back into the index so its changes get
					// re-committed along with any newly staged ones.
					oldHead, err := proj.softReset("change")
					if err != nil {
						return nil, err
					}
					prevMsg = oldHead.Payload.Change.Description
				}
				if *description == "" {
					// no -descr given; collect one interactively via $EDITOR,
					// pre-filled with the previous message when amending.
					var err error
					if *description, err = tmpFileMsg(defaultCommitFileMsgTpl, prevMsg); err != nil {
						return nil, fmt.Errorf("error collecting commit message from user: %w", err)
					} else if *description == "" {
						return nil, errors.New("empty description, not doing anything")
					}
				}
				payUn, err := proj.NewPayloadChange(*description)
				if err != nil {
					return nil, fmt.Errorf("could not construct change payload: %w", err)
				} else if err := accreditAndCommit(payUn); err != nil {
					return nil, err
				}
				return nil, nil
			})
		},
	)

	cmd.SubCmd("credential", "Commit credential of one or more change commits",
		func(ctx context.Context, cmd *dcmd.Cmd) {
			flag := cmd.FlagSet()
			startRev := flag.String("start", "", "Revision of the starting commit to accredit (when accrediting a range of changes)")
			endRev := flag.String("end", "HEAD", "Revision of the ending commit to accredit (when accrediting a range of changes)")
			rev := flag.String("rev", "", "Revision of commit to accredit (when accrediting a single commit)")
			description := flag.String("descr", "", "Description of changes being accredited")
			cmd.Run(func() (context.Context, error) {
				if *rev == "" && *startRev == "" {
					return nil, errors.New("-rev or -start is required")
				} else if hasStaged {
					return nil, errors.New("credential commit cannot have staged changes")
				}

				// resolve either the single -rev commit or the -start..-end
				// range.
				var commits []dehub.Commit
				if *rev != "" {
					commit, err := proj.GetCommitByRevision(plumbing.Revision(*rev))
					if err != nil {
						return nil, fmt.Errorf("resolving revision %q: %w", *rev, err)
					}
					commits = []dehub.Commit{commit}
				} else {
					var err error
					commits, err = proj.GetCommitRangeByRevision(
						plumbing.Revision(*startRev),
						plumbing.Revision(*endRev),
					)
					if err != nil {
						return nil, fmt.Errorf("resolving revisions %q to %q: %w",
							*startRev, *endRev, err)
					}
				}

				var credPayUn dehub.PayloadUnion
				if len(commits) == 0 {
					return nil, errors.New("cannot create credential based on empty range of commits")
				} else if len(commits) == 1 && commits[0].Payload.Credential != nil {
					// re-accrediting an existing credential commit; reuse its
					// payload as-is.
					credPayUn = commits[0].Payload
				} else {
					if *description == "" {
						// default the interactive prompt to the last change
						// description among the commits being accredited.
						lastDescr, err := dehub.LastChangeDescription(commits)
						if err != nil {
							return nil, fmt.Errorf("determining change description of commit(s): %w", err)
						}
						*description, err = tmpFileMsg(defaultCommitFileMsgTpl, lastDescr)
						if err != nil {
							return nil, fmt.Errorf("collecting credential description from user: %w", err)
						} else if *description == "" {
							return nil, errors.New("empty description, not doing anything")
						}
					}
					var err error
					credPayUn, err = proj.NewPayloadCredentialFromChanges(*description, commits)
					if err != nil {
						return nil, fmt.Errorf("constructing credential commit: %w", err)
					}
				}

				if err := accreditAndCommit(credPayUn); err != nil {
					return nil, err
				}
				return nil, nil
			})
		},
	)

	cmd.SubCmd("comment", "Commit a comment to a branch",
		func(ctx context.Context, cmd *dcmd.Cmd) {
			flag := cmd.FlagSet()
			comment := flag.String("comment", "", "Comment message")
			amend := flag.Bool("amend", false, "Amend the comment message currently in HEAD")
			cmd.Run(func() (context.Context, error) {
				if hasStaged {
					return nil, errors.New("comment commit cannot have staged changes")
				}
				var prevComment string
				if *amend {
					// pop the previous comment commit off HEAD so its message
					// can be edited and re-committed.
					oldHead, err := proj.softReset("comment")
					if err != nil {
						return nil, err
					}
					prevComment = oldHead.Payload.Comment.Comment
				}
				if *comment == "" {
					var err error
					if *comment, err = tmpFileMsg(defaultCommitFileMsgTpl, prevComment); err != nil {
						return nil, fmt.Errorf("collecting comment message from user: %w", err)
					} else if *comment == "" {
						return nil, errors.New("empty comment message, not doing anything")
					}
				}
				payUn, err := proj.NewPayloadComment(*comment)
				if err != nil {
					return nil, fmt.Errorf("constructing comment commit: %w", err)
				}
				return nil, accreditAndCommit(payUn)
			})
		},
	)

	cmd.Run(body)
}
// cmdCombine implements the "combine" sub-command, which squashes a range of
// commits (from -start to -end) into a single new commit placed on the -onto
// branch.
func cmdCombine(ctx context.Context, cmd *dcmd.Cmd) {
	fs := cmd.FlagSet()
	onto := fs.String("onto", "", "Branch the new commit should be put onto")
	startRev := fs.String("start", "", "Revision of the starting commit to combine")
	endRev := fs.String("end", "", "Revision of the ending commit to combine")

	var p proj
	p.initFlags(fs)

	cmd.Run(func() (context.Context, error) {
		if *onto == "" || *startRev == "" || *endRev == "" {
			return nil, errors.New("-onto, -start, and -end are required")
		}
		if err := p.openProj(); err != nil {
			return nil, err
		}

		commits, err := p.GetCommitRangeByRevision(
			plumbing.Revision(*startRev),
			plumbing.Revision(*endRev),
		)
		if err != nil {
			return nil, fmt.Errorf("error getting commits %q to %q: %w",
				*startRev, *endRev, err)
		}

		ontoBranch := plumbing.NewBranchReferenceName(*onto)
		commit, err := p.CombinePayloadChanges(commits, ontoBranch)
		if err != nil {
			return nil, err
		}

		fmt.Printf("new commit %q added to branch %q\n", commit.Hash, ontoBranch.Short())
		return nil, nil
	})
}

View File

@ -1,66 +0,0 @@
package main
import (
"bufio"
"context"
"errors"
"fmt"
"io"
"os"
"strings"
"dehub.dev/src/dehub.git/cmd/dehub/dcmd"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// cmdHook implements the "hook" sub-command, which lets dehub be used
// directly as a git hook. Currently only the server-side pre-receive hook is
// supported.
func cmdHook(ctx context.Context, cmd *dcmd.Cmd) {
	flag := cmd.FlagSet()
	var proj proj
	proj.initFlags(flag)

	// body opens the project before any sub-command runs.
	body := func() (context.Context, error) {
		if err := proj.openProj(); err != nil {
			return nil, err
		}
		return ctx, nil
	}

	cmd.SubCmd("pre-receive", "Use dehub as a server-side pre-receive hook",
		func(ctx context.Context, cmd *dcmd.Cmd) {
			cmd.Run(func() (context.Context, error) {
				// git feeds the hook one line per ref being pushed, of the
				// form "<old-hash> <new-hash> <ref-name>".
				br := bufio.NewReader(os.Stdin)
				for {
					line, err := br.ReadString('\n')
					if errors.Is(err, io.EOF) {
						break
					} else if err != nil {
						return nil, fmt.Errorf("error reading next line from stdin: %w", err)
					}
					fmt.Printf("Processing line %q\n", strings.TrimSpace(line))
					lineParts := strings.Fields(line)
					if len(lineParts) < 3 {
						return nil, fmt.Errorf("malformed pre-receive hook stdin line %q", line)
					}
					endHash := plumbing.NewHash(lineParts[1])
					branchName := plumbing.ReferenceName(lineParts[2])
					if !branchName.IsBranch() {
						return nil, fmt.Errorf("reference %q is not a branch, can't push to it", branchName)
					} else if endHash == plumbing.ZeroHash {
						return nil, errors.New("deleting remote branches is not currently supported")
					}
					// BUG FIX: this previously `return`ed unconditionally on
					// the first line, so only the first pushed ref was ever
					// verified and the success message below was unreachable.
					if err := proj.VerifyCanSetBranchHEADTo(branchName, endHash); err != nil {
						return nil, err
					}
				}
				fmt.Println("All pushed commits have been verified, well done.")
				return nil, nil
			})
		},
	)

	cmd.Run(body)
}

View File

@ -1,27 +0,0 @@
package main
import (
"context"
"fmt"
"dehub.dev/src/dehub.git"
"dehub.dev/src/dehub.git/cmd/dehub/dcmd"
)
// cmdInit implements the "init" sub-command, which sets up a new dehub
// project (optionally bare and/or remote-capable) at the given path.
func cmdInit(ctx context.Context, cmd *dcmd.Cmd) {
	fs := cmd.FlagSet()
	path := fs.String("path", ".", "Path to initialize the project at")
	bare := fs.Bool("bare", false, "Initialize the git repo as a bare repository")
	remote := fs.Bool("remote", false, "Configure the git repo to allow it to be used as a remote endpoint")

	cmd.Run(func() (context.Context, error) {
		if _, err := dehub.InitProject(*path,
			dehub.InitBareRepo(*bare),
			dehub.InitRemoteRepo(*remote),
		); err != nil {
			return nil, fmt.Errorf("initializing repo at %q: %w", *path, err)
		}
		return nil, nil
	})
}

View File

@ -1,75 +0,0 @@
package main
import (
"errors"
"flag"
"fmt"
"os"
"dehub.dev/src/dehub.git"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// proj wraps a dehub.Project along with the command-line flags shared by
// every sub-command which operates on a project.
type proj struct {
	bare bool // whether the project's git repo is expected to be bare
	*dehub.Project
}
// initFlags registers proj's command-line flags onto the given FlagSet.
func (proj *proj) initFlags(flag *flag.FlagSet) {
	flag.BoolVar(&proj.bare, "bare", false, "If set then the project being opened will be expected to have a bare git repo")
}
// openProj opens the dehub project in the current working directory,
// populating the embedded Project on success.
func (proj *proj) openProj() error {
	p, err := dehub.OpenProject(".", dehub.OpenBareRepo(proj.bare))
	if err != nil {
		wd, _ := os.Getwd()
		return fmt.Errorf("opening repo at %q: %w", wd, err)
	}
	proj.Project = p
	return nil
}
// softReset resets to HEAD^ (or to an orphaned index, if HEAD has no
// parents), returning the old HEAD. If expType is non-empty then HEAD's
// payload must have that type, otherwise an error is returned.
func (proj *proj) softReset(expType string) (dehub.Commit, error) {
	head, err := proj.GetHeadCommit()
	if err != nil {
		return head, fmt.Errorf("getting HEAD commit: %w", err)
	} else if typ := head.Payload.Type(); expType != "" && typ != expType {
		return head, fmt.Errorf("expected HEAD to have a %q payload, but found a %q payload",
			expType, typ)
	}

	branchName, branchErr := proj.ReferenceToBranchName(plumbing.HEAD)

	numParents := head.Object.NumParents()
	if numParents > 1 {
		return head, errors.New("cannot reset to parent of a commit with multiple parents")
	} else if numParents == 0 {
		// if there are no parents then HEAD is the only commit in the branch.
		// Don't handle ErrNoBranchReference because there's not really anything
		// which can be done for that; we can't set head to "no commit".
		// Otherwise, just remove the branch reference, HEAD will still point to
		// it and all of HEAD's changes will be in the index.
		if branchErr != nil {
			return head, branchErr
		} else if err := proj.GitRepo.Storer.RemoveReference(branchName); err != nil {
			return head, fmt.Errorf("removing reference %q: %w", branchName, err)
		}
		return head, nil
	}

	refName := branchName
	if errors.Is(branchErr, dehub.ErrNoBranchReference) {
		// detached HEAD: point HEAD itself at the parent rather than a
		// branch reference.
		refName = plumbing.HEAD
	} else if branchErr != nil {
		// BUG FIX: this previously checked `err` (always nil at this point)
		// instead of `branchErr`, silently swallowing errors resolving
		// HEAD's branch.
		return head, fmt.Errorf("resolving HEAD: %w", branchErr)
	}

	parentHash := head.Object.ParentHashes[0]
	newHeadRef := plumbing.NewHashReference(refName, parentHash)
	if err := proj.GitRepo.Storer.SetReference(newHeadRef); err != nil {
		return head, fmt.Errorf("storing reference %q: %w", newHeadRef, err)
	}
	return head, nil
}

View File

@ -1,48 +0,0 @@
package main
import (
"context"
"fmt"
"dehub.dev/src/dehub.git"
"dehub.dev/src/dehub.git/cmd/dehub/dcmd"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// cmdVerify implements the "verify" sub-command, which checks that a single
// commit carries the credentials required by the branch's access controls.
func cmdVerify(ctx context.Context, cmd *dcmd.Cmd) {
	fs := cmd.FlagSet()
	rev := fs.String("rev", "HEAD", "Revision of commit to verify")
	branch := fs.String("branch", "", "Branch that the revision is on. If not given then the currently checked out branch is assumed")

	var p proj
	p.initFlags(fs)

	cmd.Run(func() (context.Context, error) {
		if err := p.openProj(); err != nil {
			return nil, err
		}

		commit, err := p.GetCommitByRevision(plumbing.Revision(*rev))
		if err != nil {
			return nil, fmt.Errorf("resolving revision %q: %w", *rev, err)
		}

		// Resolve the branch the commit should be verified against,
		// defaulting to whatever HEAD currently points at.
		var branchName plumbing.ReferenceName
		if *branch != "" {
			branchName = plumbing.NewBranchReferenceName(*branch)
		} else if branchName, err = p.ReferenceToBranchName(plumbing.HEAD); err != nil {
			return nil, fmt.Errorf("determining branch at HEAD: %w", err)
		}

		if err := p.VerifyCommits(branchName, []dehub.Commit{commit}); err != nil {
			return nil, fmt.Errorf("could not verify commit at %q (%s): %w",
				*rev, commit.Hash, err)
		}
		fmt.Printf("commit at %q (%s) is good to go!\n", *rev, commit.Hash)
		return nil, nil
	})
}

View File

@ -1,191 +0,0 @@
// Package dcmd implements command and sub-command parsing and runtime
// management. It wraps the stdlib flag package as well, to incorporate
// configuration into the mix.
package dcmd
import (
"context"
"errors"
"flag"
"fmt"
"os"
"sort"
"strings"
)
// exitErr prints the given error to stderr, flushes both standard streams,
// and terminates the process with a non-zero status.
func exitErr(err error) {
	fmt.Fprintln(os.Stderr, "exiting:", err)
	os.Stderr.Sync()
	os.Stdout.Sync()
	os.Exit(1)
}
// subCmd holds a registered sub-command: its name, a one-line description
// for usage output, and the callback which performs it.
type subCmd struct {
	name, descr string
	run         func(context.Context, *Cmd)
}
// Cmd wraps a flag.FlagSet instance to provide extra functionality that dehub
// wants, specifically around sub-command support.
type Cmd struct {
	flagSet *flag.FlagSet
	binary  string // only gets set on root Cmd, during Run
	subCmds []subCmd

	// these fields get set by the parent Cmd, if this is a sub-command.
	name   string
	args   []string
	parent *Cmd
}
// New initializes and returns an empty Cmd instance.
func New() *Cmd {
	return new(Cmd)
}
// getFlagSet lazily initializes and returns the Cmd's underlying
// flag.FlagSet.
func (cmd *Cmd) getFlagSet() *flag.FlagSet {
	if cmd.flagSet != nil {
		return cmd.flagSet
	}
	cmd.flagSet = flag.NewFlagSet(cmd.name, flag.ContinueOnError)
	return cmd.flagSet
}
// numFlags returns how many flags have been defined on this Cmd's FlagSet.
func (cmd *Cmd) numFlags() int {
	n := 0
	cmd.getFlagSet().VisitAll(func(_ *flag.Flag) {
		n++
	})
	return n
}
// FlagSet returns the flag.FlagSet instance on which parameter creation
// methods can be called, e.g. String(...) or Int(...).
func (cmd *Cmd) FlagSet() *flag.FlagSet {
	return cmd.getFlagSet()
}
// SubCmd registers a sub-command of this Cmd.
//
// A new Cmd will be instantiated when this sub-command is picked on the
// command-line during this Cmd's Run method. The Context returned from that
// Run and the new Cmd will be passed into the callback given here. The
// sub-command should then be performed in the same manner as this Cmd is
// performed (including setting flags, adding sub-sub-commands, etc...)
func (cmd *Cmd) SubCmd(name, descr string, run func(context.Context, *Cmd)) {
	sc := subCmd{name: name, descr: descr, run: run}
	cmd.subCmds = append(cmd.subCmds, sc)
	// it's not the most efficient to do this here, but it is the easiest
	sort.Slice(cmd.subCmds, func(i, j int) bool {
		return cmd.subCmds[i].name < cmd.subCmds[j].name
	})
}
// printUsageHead prints the usage banner for this Cmd — recursing up through
// parent Cmds so the root prints the fully-assembled command line — followed
// by this Cmd's flag defaults. subCmdTitle, if non-empty, is the
// already-formatted usage tail contributed by a child Cmd.
func (cmd *Cmd) printUsageHead(subCmdTitle string) {
	hasFlags := cmd.numFlags() > 0

	var title string
	if cmd.parent == nil {
		title = fmt.Sprintf("USAGE: %s", cmd.binary)
		if hasFlags {
			title += " [flags]"
		}
	} else {
		// FIX: was fmt.Sprintf("%s", cmd.name) (staticcheck S1025).
		title = cmd.name
		if hasFlags {
			title += fmt.Sprintf(" [%s flags]", cmd.name)
		}
	}

	if subCmdTitle != "" {
		title += " " + subCmdTitle
	} else if len(cmd.subCmds) > 0 {
		// FIX: was fmt.Sprint of a constant string (staticcheck S1039).
		title += " <sub-command> [sub-command flags]"
	}

	if cmd.parent == nil {
		fmt.Printf("\n%s\n\n", title)
	} else {
		// let the root ancestor print the assembled title line.
		cmd.parent.printUsageHead(title)
	}

	if hasFlags {
		if cmd.parent == nil {
			fmt.Print("### FLAGS ###\n\n")
		} else {
			fmt.Printf("### %s FLAGS ###\n\n", strings.ToUpper(cmd.name))
		}
		cmd.getFlagSet().PrintDefaults()
		fmt.Print("\n")
	}
}
// Run performs the command. It starts by parsing all flags in the Cmd's
// FlagSet, and possibly exiting with a usage message if appropriate. It will
// then perform the given body callback, and then perform any sub-commands
// (if selected).
//
// The context returned from the callback will be passed into the callback
// (given to SubCmd) of any sub-commands which are run, and so on.
func (cmd *Cmd) Run(body func() (context.Context, error)) {
	args := cmd.args
	if cmd.parent == nil {
		// this is the root Cmd; pull the binary name and args straight from
		// the process arguments.
		cmd.binary, args = os.Args[0], os.Args[1:]
	}

	fs := cmd.getFlagSet()
	fs.Usage = func() {
		cmd.printUsageHead("")
		if len(cmd.subCmds) == 0 {
			return
		}
		fmt.Printf("### SUB-COMMANDS ###\n\n")
		for _, subCmd := range cmd.subCmds {
			fmt.Printf("\t%s : %s\n", subCmd.name, subCmd.descr)
		}
		fmt.Println("")
	}

	if err := fs.Parse(args); err != nil {
		exitErr(err)
		return
	}

	ctx, err := body()
	if err != nil {
		exitErr(err)
	}

	// body has run, now do sub-command (if there is one)
	subArgs := fs.Args()
	if len(cmd.subCmds) == 0 {
		return
	} else if len(subArgs) == 0 && len(cmd.subCmds) > 0 {
		fs.Usage()
		exitErr(errors.New("no sub-command selected"))
	}

	// now find that sub-command
	subCmdName := strings.ToLower(subArgs[0])
	var subCmd subCmd
	var subCmdOk bool
	for _, subCmd = range cmd.subCmds {
		if subCmdOk = subCmd.name == subCmdName; subCmdOk {
			break
		}
	}
	if !subCmdOk {
		fs.Usage()
		exitErr(fmt.Errorf("unknown command %q", subCmdName))
	}

	// hand off to the chosen sub-command, wired up as a child of this Cmd so
	// usage output nests correctly.
	subCmdCmd := New()
	subCmdCmd.name = subCmd.name
	subCmdCmd.args = subArgs[1:]
	subCmdCmd.parent = cmd
	subCmd.run(ctx, subCmdCmd)
}

View File

@ -1,20 +0,0 @@
package main
import (
"context"
"dehub.dev/src/dehub.git/cmd/dehub/dcmd"
)
// main wires up the top-level dehub command with all of its sub-commands,
// then hands control to the command runner.
func main() {
	cmd := dcmd.New()
	for _, sub := range []struct {
		name, descr string
		run         func(context.Context, *dcmd.Cmd)
	}{
		{"init", "Initialize a new project in a directory", cmdInit},
		{"commit", "Commits staged changes to the head of the current branch", cmdCommit},
		{"verify", "Verifies one or more commits as having the proper credentials", cmdVerify},
		{"hook", "Use dehub as a git hook", cmdHook},
		{"combine", "Combine multiple change and credential commits into a single commit", cmdCombine},
	} {
		cmd.SubCmd(sub.name, sub.descr, sub.run)
	}
	cmd.Run(func() (context.Context, error) {
		return context.Background(), nil
	})
}

View File

@ -1,72 +0,0 @@
package main
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"strings"
)
// defaultCommitFileMsgTpl is the template written into the temp file handed
// to $EDITOR when collecting a commit message interactively; %s is the
// pre-filled message (e.g. the previous description when amending).
const defaultCommitFileMsgTpl = `%s
# Please enter the description for your commit(s). Lines starting with '#' will
# be ignored, and an empty message aborts the commit.`
func tmpFileMsg(tpl string, args ...interface{}) (string, error) {
editor := os.Getenv("EDITOR")
if editor == "" {
return "", errors.New("EDITOR not set, please set it or use -msg in order to create your commit message")
} else if _, err := os.Stat(editor); err != nil {
return "", fmt.Errorf("could not stat EDITOR %q: %w", editor, err)
}
tmpf, err := ioutil.TempFile("", "dehub.*.txt")
if err != nil {
return "", fmt.Errorf("could not open temp file: %w", err)
}
tmpfName := tmpf.Name()
defer os.Remove(tmpfName)
tmpBody := bytes.NewBufferString(fmt.Sprintf(tpl, args...))
_, err = io.Copy(tmpf, tmpBody)
tmpf.Close()
if err != nil {
return "", fmt.Errorf("could not write helper message to temp file %q: %w", tmpfName, err)
}
cmd := exec.Command(editor, tmpfName)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
return "", fmt.Errorf("error running '%s %q': %w", editor, tmpfName, err)
}
body, err := ioutil.ReadFile(tmpfName)
if err != nil {
return "", fmt.Errorf("error retrieving message body from %q: %w", tmpfName, err)
}
bodyFiltered := new(bytes.Buffer)
bodyBR := bufio.NewReader(bytes.NewBuffer(body))
for {
line, err := bodyBR.ReadString('\n')
if errors.Is(err, io.EOF) {
break
} else if err != nil {
return "", fmt.Errorf("error reading from buffered body: %w", err)
}
if !strings.HasPrefix(strings.TrimSpace(line), "#") {
bodyFiltered.WriteString(line)
}
}
return strings.TrimSpace(bodyFiltered.String()), nil
}

View File

@ -1,27 +0,0 @@
# git-http-server
A simple http server which uses a git repo (bare or otherwise) as the underlying
filesystem.
* Automatically renders markdown files as html.
* Will use `README.md` as the index, if available.
* Can be set to use a specific branch.
All configuration is done on the command-line.
# Installation
Installation of git-http-server is done in the same manner as the `dehub`
command itself:
```
go get dehub.dev/src/dehub.git/cmd/git-http-server
```
# Markdown
TODO
# Templates
TODO

View File

@ -1,11 +0,0 @@
module dehub/cmd/git-http-server
go 1.14
require (
github.com/gomarkdown/markdown v0.0.0-20200513213024-62c5e2c608cc
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17 // indirect
gopkg.in/src-d/go-git.v4 v4.13.1
)
replace dehub => ../../

View File

@ -1,79 +0,0 @@
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/gomarkdown/markdown v0.0.0-20200513213024-62c5e2c608cc h1:T+Fwk3llJdUIQeBI8fC/ARqRD5mWy3AE5I6ZU3VkIw8=
github.com/gomarkdown/markdown v0.0.0-20200513213024-62c5e2c608cc/go.mod h1:aii0r/K0ZnHv7G0KF7xy1v0A7s2Ljrb5byB7MO5p6TU=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70=
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
golang.org/dl v0.0.0-20190829154251-82a15e2f2ead/go.mod h1:IUMfjQLJQd4UTqG1Z90tenwKoCX93Gn3MAQJMOSBsDQ=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17 h1:nVJ3guKA9qdkEQ3TUdXI9QSINo2CUPM/cySEvw2w8I0=
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80 h1:Ao/3l156eZf2AW5wK8a7/smtodRU+gha3+BeqJ69lRk=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e h1:D5TXcfTk7xF7hvieo4QErS3qqCB4teTffacDWr7CI+0=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=

View File

@ -1,154 +0,0 @@
package main
import (
"bytes"
"errors"
"flag"
"fmt"
"io/ioutil"
"log"
"net/http"
"path/filepath"
"strings"
"text/template"
"time"
"github.com/gomarkdown/markdown"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)
// handler serves files out of a git repository tree over HTTP.
type handler struct {
	// repo is the git repository files are served from.
	repo *git.Repository
	// branch is the reference resolved by default when a request does not
	// supply a "rev" form value.
	branch plumbing.ReferenceName
	// tpl, if non-nil, is used to wrap rendered markdown in an HTML page.
	tpl *template.Template
}
// getTree resolves the request's "rev" form value (defaulting to the
// handler's configured branch) to a commit, and returns that commit's tree.
// On failure it also returns a suggested HTTP status code: 404 for
// resolution failures, 500 for internal errors.
func (h handler) getTree(r *http.Request) (*object.Tree, int, error) {
	rev := plumbing.Revision(r.FormValue("rev"))
	if rev == "" {
		rev = plumbing.Revision(h.branch)
	}
	hashPtr, err := h.repo.ResolveRevision(rev)
	if err != nil {
		return nil, 404, fmt.Errorf("resolving revision %q: %w", rev, err)
	}
	hash := *hashPtr // I don't know why ResolveRevision returns a pointer
	commit, err := h.repo.CommitObject(hash)
	if err != nil {
		return nil, 404, fmt.Errorf("retrieving commit for revision %q (%q): %w",
			rev, hash, err)
	}
	tree, err := h.repo.TreeObject(commit.TreeHash)
	if err != nil {
		// use %w (not %v) so callers can unwrap, consistent with the other
		// error returns in this method.
		return nil, 500, fmt.Errorf("fetching tree %q of commit %q: %w",
			commit.TreeHash, hash, err)
	}
	return tree, 0, nil
}
// ServeHTTP serves the requested file out of the git tree. Directory paths
// and .html paths fall back to rendering a sibling markdown file (README.md
// for directories) when the requested file doesn't exist in the tree.
func (h handler) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
	path := r.URL.Path
	// Determine the markdown fallback path corresponding to the request.
	var mdPath string
	if strings.HasSuffix(path, "/") {
		mdPath = filepath.Join(path, "README.md") // do before modifying path
		path = filepath.Join(path, "index.html")
	} else if strings.HasSuffix(path, "/index.html") {
		mdPath = filepath.Join(filepath.Dir(path), "README.md")
	} else if filepath.Ext(path) == ".html" {
		mdPath = strings.TrimSuffix(path, ".html") + ".md"
	}
	// git tree paths have no leading slash
	path = strings.TrimPrefix(path, "/")
	mdPath = strings.TrimPrefix(mdPath, "/")
	tree, errStatusCode, err := h.getTree(r)
	if err != nil {
		http.Error(rw, err.Error(), errStatusCode)
		return
	}
	var usingMD bool
	f, err := tree.File(path)
	if errors.Is(err, object.ErrFileNotFound) {
		usingMD = true
		f, err = tree.File(mdPath)
	}
	if errors.Is(err, object.ErrFileNotFound) {
		http.Error(rw, fmt.Sprintf("%q not found", path), 404)
		return
	} else if err != nil {
		log.Printf("fetching file %q / %q: %v", path, mdPath, err)
		http.Error(rw, "internal error", 500)
		return
	}
	fr, err := f.Blob.Reader()
	if err != nil {
		log.Printf("getting reader of file %q: %v", f.Name, err)
		http.Error(rw, "internal error", 500)
		return
	}
	defer fr.Close()
	b, err := ioutil.ReadAll(fr)
	if err != nil {
		log.Printf("reading in contents of file %q: %v", f.Name, err)
		http.Error(rw, "internal error", 500)
		return
	}
	if !usingMD {
		// NOTE(review): time.Now() as the modtime effectively disables
		// client-side caching — confirm that's intended.
		http.ServeContent(rw, r, filepath.Base(path), time.Now(), bytes.NewReader(b))
		return
	}
	mdHTML := markdown.ToHTML(b, nil, nil)
	if h.tpl == nil {
		http.ServeContent(rw, r, filepath.Base(path), time.Now(), bytes.NewReader(mdHTML))
		return
	}
	// Don't silently drop template errors. The response may already be
	// partially written by this point, so logging is the best we can do.
	if err := h.tpl.Execute(rw, struct {
		Body string
	}{string(mdHTML)}); err != nil {
		log.Printf("executing template for %q: %v", path, err)
	}
}
// main parses flags, opens the repository, verifies the configured branch
// exists, and serves it over HTTP until the process is killed.
func main() {
	addr := flag.String("addr", ":8000", "Address to listen for http requests on")
	branchName := flag.String("branch", "master", "git branch to serve the HEAD of")
	repoPath := flag.String("repo-path", ".", "Path to the git repository to serve")
	tplPath := flag.String("tpl-path", "", "Path to an optional template file which can be used when rendering markdown")
	flag.Parse()
	repo, err := git.PlainOpen(*repoPath)
	if err != nil {
		log.Fatalf("opening git repo at path %q: %v", *repoPath, err)
	}
	branch := plumbing.NewBranchReferenceName(*branchName)
	// do an initial check for the branch, for funsies
	if _, err := repo.Reference(branch, true); err != nil {
		log.Fatalf("resolving reference %q: %v", branch, err)
	}
	h := &handler{
		repo:   repo,
		branch: branch,
	}
	if *tplPath != "" {
		h.tpl = template.Must(template.ParseFiles(*tplPath))
	}
	log.Printf("listening on %q", *addr)
	// ListenAndServe always returns a non-nil error; don't drop it.
	log.Fatal(http.ListenAndServe(*addr, h))
}

222
commit.go
View File

@ -1,222 +0,0 @@
package dehub
import (
"encoding/hex"
"errors"
"fmt"
"path/filepath"
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)
// Commit wraps a single git commit object, and also contains various fields
// which are parsed out of it, including the payload. It is used as a
// convenience type, in place of having to manually retrieve and parse specific
// information out of commit objects.
type Commit struct {
	// Payload is the dehub payload parsed out of the commit message.
	Payload PayloadUnion
	// Hash is the git hash of the commit object.
	Hash plumbing.Hash
	// Object is the underlying git commit object.
	Object *object.Commit
	// TreeObject is the git tree referenced by the commit.
	TreeObject *object.Tree
}
// GetCommit retrieves the Commit at the given hash, along with all of the
// sub-data which can be parsed out of it (tree object and payload).
func (proj *Project) GetCommit(h plumbing.Hash) (Commit, error) {
	var c Commit
	var err error
	if c.Object, err = proj.GitRepo.CommitObject(h); err != nil {
		return c, fmt.Errorf("getting git commit object: %w", err)
	}
	if c.TreeObject, err = proj.GitRepo.TreeObject(c.Object.TreeHash); err != nil {
		return c, fmt.Errorf("getting git tree object %q: %w",
			c.Object.TreeHash, err)
	}
	if err = c.Payload.UnmarshalText([]byte(c.Object.Message)); err != nil {
		return c, fmt.Errorf("decoding commit message: %w", err)
	}
	c.Hash = c.Object.Hash
	return c, nil
}
// ErrHeadIsZero is used to indicate that HEAD resolves to the zero hash. An
// example of when this can happen is if the project was just initialized and
// has no commits, or if an orphan branch is checked out.
//
// Callers should compare against this sentinel using errors.Is.
var ErrHeadIsZero = errors.New("HEAD resolves to the zero hash")
// GetHeadCommit returns the Commit which is currently referenced by HEAD.
// This method may return ErrHeadIsZero if HEAD resolves to the zero hash.
func (proj *Project) GetHeadCommit() (Commit, error) {
	headHash, err := proj.ReferenceToHash(plumbing.HEAD)
	if err != nil {
		return Commit{}, fmt.Errorf("resolving HEAD: %w", err)
	}
	if headHash == plumbing.ZeroHash {
		return Commit{}, ErrHeadIsZero
	}
	commit, err := proj.GetCommit(headHash)
	if err != nil {
		return Commit{}, fmt.Errorf("getting commit %q: %w", headHash, err)
	}
	return commit, nil
}
// GetCommitRange returns an ancestry of Commits, with the first being the
// commit immediately following the given starting hash, and the last being the
// given ending hash.
//
// If start is plumbing.ZeroHash then the root commit will be the starting hash.
func (proj *Project) GetCommitRange(start, end plumbing.Hash) ([]Commit, error) {
	curr, err := proj.GetCommit(end)
	if err != nil {
		return nil, fmt.Errorf("retrieving commit %q: %w", end, err)
	}
	// Walk backwards from end, collecting commits, until start (if given) is
	// encountered or the root commit is reached.
	wantStart := start != plumbing.ZeroHash
	foundStart := false
	var commits []Commit
	for {
		if wantStart && curr.Hash == start {
			foundStart = true
			break
		}
		commits = append(commits, curr)
		numParents := curr.Object.NumParents()
		if numParents > 1 {
			return nil, fmt.Errorf("commit %q has more than one parent: %+v",
				curr.Hash, curr.Object.ParentHashes)
		}
		if numParents == 0 {
			break // reached the root commit
		}
		parentHash := curr.Object.ParentHashes[0]
		if curr, err = proj.GetCommit(parentHash); err != nil {
			return nil, fmt.Errorf("retrieving commit %q: %w", parentHash, err)
		}
	}
	if wantStart && !foundStart {
		return nil, fmt.Errorf("unable to find commit %q as an ancestor of %q",
			start, end)
	}
	// commits were collected newest-first; reverse into the expected
	// oldest-first order.
	for i, j := 0, len(commits)-1; i < j; i, j = i+1, j-1 {
		commits[i], commits[j] = commits[j], commits[i]
	}
	return commits, nil
}
var (
	// hashStrLen is the length of a full hex-encoded git hash.
	hashStrLen = len(plumbing.ZeroHash.String())
	// errNotHex indicates a string is not valid hexadecimal, and so cannot
	// be a (short) commit hash.
	errNotHex = errors.New("not a valid hex string")
)
// findCommitByShortHash resolves a shortened (or full-length) hex commit hash
// string to the full hash of the single commit it unambiguously identifies.
// It returns errNotHex if hashStr isn't valid hex, and an error if zero or
// more than one commit matches the prefix.
func (proj *Project) findCommitByShortHash(hashStr string) (plumbing.Hash, error) {
	// hex.DecodeString requires an even number of characters; pad with a
	// throwaway nibble purely for the validity check.
	paddedHashStr := hashStr
	if len(hashStr)%2 > 0 {
		paddedHashStr += "0"
	}
	if hashB, err := hex.DecodeString(paddedHashStr); err != nil {
		return plumbing.ZeroHash, errNotHex
	} else if len(hashStr) == hashStrLen {
		// a full-length hash needs no prefix search
		var hash plumbing.Hash
		copy(hash[:], hashB)
		return hash, nil
	} else if len(hashStr) < 2 {
		return plumbing.ZeroHash, errors.New("hash string must be 2 characters long or more")
	}
	// Search the loose object store for commits whose hash starts with
	// hashStr; loose objects live at objects/<prefix>/<rest-of-hash>.
	//
	// NOTE(review): only the i == 2 iteration matches git's usual on-disk
	// layout of objects/<2-char-dir>/<38-char-file>; for i > 2 the ReadDir
	// would look up a directory like objects/abc, and for inputs shorter
	// than i the hashStr[:i] slice could go out of range — confirm the
	// intent of looping i beyond 2.
	for i := 2; i < hashStrLen; i++ {
		hashPrefix, hashTail := hashStr[:i], hashStr[i:]
		path := filepath.Join("objects", hashPrefix)
		fileInfos, err := proj.GitDirFS.ReadDir(path)
		if err != nil {
			return plumbing.ZeroHash, fmt.Errorf("listing files in %q: %w", path, err)
		}
		var matchedHash plumbing.Hash
		for _, fileInfo := range fileInfos {
			objFileName := fileInfo.Name()
			if !strings.HasPrefix(objFileName, hashTail) {
				continue
			}
			// reconstruct the full hash from directory name + file name
			objHash := plumbing.NewHash(hashPrefix + objFileName)
			obj, err := proj.GitRepo.Storer.EncodedObject(plumbing.AnyObject, objHash)
			if err != nil {
				return plumbing.ZeroHash, fmt.Errorf("reading object %q off disk: %w", objHash, err)
			} else if obj.Type() != plumbing.CommitObject {
				// only commit objects may be matched by short hash
				continue
			} else if matchedHash == plumbing.ZeroHash {
				matchedHash = objHash
				continue
			}
			// a second commit matched, so the short hash is ambiguous
			return plumbing.ZeroHash, fmt.Errorf("both %q and %q match", matchedHash, objHash)
		}
		if matchedHash != plumbing.ZeroHash {
			return matchedHash, nil
		}
	}
	return plumbing.ZeroHash, errors.New("failed to find a commit object with a matching prefix")
}
// resolveRev resolves an arbitrary revision to a commit hash. The zero hash
// resolves to itself, short hex hashes are looked up directly in the object
// store, and anything else is deferred to git's normal revision resolution.
func (proj *Project) resolveRev(rev plumbing.Revision) (plumbing.Hash, error) {
	revStr := string(rev)
	if revStr == plumbing.ZeroHash.String() {
		return plumbing.ZeroHash, nil
	}
	// first try treating the revision as a short commit hash
	hash, err := proj.findCommitByShortHash(revStr)
	switch {
	case err == nil:
		// guess it _is_ a short hash, knew it!
		return hash, nil
	case errors.Is(err, errNotHex):
		// not a hash at all; fall through to normal revision resolution
	default:
		return plumbing.ZeroHash, fmt.Errorf("resolving as short hash: %w", err)
	}
	hashPtr, err := proj.GitRepo.ResolveRevision(rev)
	if err != nil {
		return plumbing.ZeroHash, fmt.Errorf("resolving revision %q: %w", rev, err)
	}
	return *hashPtr, nil
}
// GetCommitByRevision resolves the revision and returns the Commit it references.
func (proj *Project) GetCommitByRevision(rev plumbing.Revision) (Commit, error) {
	hash, err := proj.resolveRev(rev)
	if err != nil {
		return Commit{}, err
	}
	commit, err := proj.GetCommit(hash)
	if err != nil {
		return Commit{}, fmt.Errorf("getting commit %q: %w", hash, err)
	}
	return commit, nil
}
// GetCommitRangeByRevision is like GetCommitRange, first resolving the given
// revisions into hashes before continuing with GetCommitRange's behavior.
func (proj *Project) GetCommitRangeByRevision(startRev, endRev plumbing.Revision) ([]Commit, error) {
	// resolve start first, then end, mirroring the argument order
	var hashes [2]plumbing.Hash
	for i, rev := range []plumbing.Revision{startRev, endRev} {
		hash, err := proj.resolveRev(rev)
		if err != nil {
			return nil, err
		}
		hashes[i] = hash
	}
	return proj.GetCommitRange(hashes[0], hashes[1])
}

View File

@ -1,94 +0,0 @@
package dehub
import (
"errors"
"fmt"
"dehub.dev/src/dehub.git/accessctl"
"dehub.dev/src/dehub.git/fs"
"dehub.dev/src/dehub.git/sigcred"
yaml "gopkg.in/yaml.v2"
)
// Account represents a single account defined in the Config.
type Account struct {
	// ID uniquely identifies the account within the project config.
	ID string `yaml:"id"`
	// Signifiers describe the means by which this account's credentials can
	// be verified (see the sigcred package).
	Signifiers []sigcred.SignifierUnion `yaml:"signifiers"`
	// Meta holds arbitrary key/value metadata about the account.
	Meta map[string]string `yaml:"meta,omitempty"`
}
// Config represents the structure of the main dehub configuration file, and is
// used to marshal/unmarshal the yaml file.
type Config struct {
	// Accounts defines every account known to the project.
	Accounts []Account `yaml:"accounts"`
	// AccessControls defines which actions are allowed under which
	// circumstances (see the accessctl package).
	AccessControls []accessctl.AccessControl `yaml:"access_controls"`
}
// loadConfig reads and parses the project config file (ConfigPath) out of the
// given filesystem, filtering out legacy access control entries which lack an
// action field.
//
// The parameter was renamed from fs to fsys because it shadowed the imported
// fs package.
func (proj *Project) loadConfig(fsys fs.FS) (Config, error) {
	rc, err := fsys.Open(ConfigPath)
	if err != nil {
		return Config{}, fmt.Errorf("could not open config.yml: %w", err)
	}
	defer rc.Close()
	var cfg Config
	if err := yaml.NewDecoder(rc).Decode(&cfg); err != nil {
		return cfg, fmt.Errorf("could not decode config.yml: %w", err)
	}
	// older config versions also had access_controls be an array, but not using
	// the action field. So filter out array elements without the action field.
	acl := cfg.AccessControls
	cfg.AccessControls = cfg.AccessControls[:0]
	for _, ac := range acl {
		if ac.Action == "" {
			continue
		}
		cfg.AccessControls = append(cfg.AccessControls, ac)
	}
	// TODO validate Config
	return cfg, nil
}
// LoadConfig loads the Config object from the HEAD of the project's git repo,
// or directly from the filesystem if there is no HEAD yet.
func (proj *Project) LoadConfig() (Config, error) {
	headFS, err := proj.headFS()
	if err != nil {
		return Config{}, fmt.Errorf("error retrieving repo HEAD: %w", err)
	}
	cfg, err := proj.loadConfig(headFS)
	return cfg, err
}
// signifierForCredential finds the Signifier belonging to the credential's
// account which actually produced the given credential, or errors if none of
// that account's signifiers did.
func (proj *Project) signifierForCredential(fsys fs.FS, cred sigcred.CredentialUnion) (sigcred.Signifier, error) {
	cfg, err := proj.loadConfig(fsys)
	if err != nil {
		return nil, fmt.Errorf("error loading config: %w", err)
	}
	// find the account the credential claims to belong to
	var account *Account
	for i := range cfg.Accounts {
		if cfg.Accounts[i].ID == cred.AccountID {
			account = &cfg.Accounts[i]
			break
		}
	}
	if account == nil {
		return nil, fmt.Errorf("no account object for account id %q present in config", cred.AccountID)
	}
	// return the first of the account's signifiers which signed the credential
	for i, sigUn := range account.Signifiers {
		sig := sigUn.Signifier(cred.AccountID)
		ok, err := sig.Signed(fsys, cred)
		if err != nil {
			return nil, fmt.Errorf("error checking if signfier index:%d signed credential: %w", i, err)
		}
		if ok {
			return sig, nil
		}
	}
	return nil, errors.New("no signifier found for credential")
}

45
diff.go
View File

@ -1,45 +0,0 @@
package dehub
import (
"fmt"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)
// ChangedFile describes a single file which has been changed in some way
// between two object.Trees. If the From fields are empty then the file was
// created, if the To fields are empty then the file was deleted.
type ChangedFile struct {
	// Path of the changed file within the tree.
	Path string
	// File modes before and after the change; zero on the side where the
	// file did not exist.
	FromMode, ToMode filemode.FileMode
	// Blob hashes before and after the change; zero on the side where the
	// file did not exist.
	FromHash, ToHash plumbing.Hash
}
// ChangedFilesBetweenTrees returns the ChangedFile objects which represent the
// difference between the two given trees.
func ChangedFilesBetweenTrees(from, to *object.Tree) ([]ChangedFile, error) {
	changes, err := object.DiffTree(from, to)
	if err != nil {
		return nil, fmt.Errorf("could not calculate tree diff: %w", err)
	}
	changedFiles := make([]ChangedFile, len(changes))
	for i := range changes {
		cf := &changedFiles[i]
		if fromEntry := changes[i].From; fromEntry.Name != "" {
			cf.Path = fromEntry.Name
			cf.FromMode = fromEntry.TreeEntry.Mode
			cf.FromHash = fromEntry.TreeEntry.Hash
		}
		if toEntry := changes[i].To; toEntry.Name != "" {
			// a change's From and To names should always agree when both are set
			if exPath := cf.Path; exPath != "" && exPath != toEntry.Name {
				panic(fmt.Sprintf("unexpected changed path from %q to %q", exPath, toEntry.Name))
			}
			cf.Path = toEntry.Name
			cf.ToMode = toEntry.TreeEntry.Mode
			cf.ToHash = toEntry.TreeEntry.Hash
		}
	}
	return changedFiles, nil
}

View File

@ -1,71 +0,0 @@
# Roadmap
This document describes currently planned features and events related to the
dehub project. Its intention is to help prioritize work. There are no dates
set, only a sequence of milestones and the requirements to hit them.
## Milestone: IPFS support
* Big ol' question mark on this one.
## Milestone: Versions
* Tag commits
* Add dehub version to payloads, make binary aware of it
* Figure out a release system?
## Milestone: Prime commits
(Cloning/remote management is probably a pre-requisite of this, so it's a good
thing it comes after IPFS support)
* Ability to specify which commit is prime.
* The prime commit is essentially the identifier of the entire project; even
if two project instances share a commit tree, if they are using a
different prime commit then they are not the same project.
## Milestone: Minimal plugin support
* SPEC and implement. Things which should be pluggable, initially:
* Conditions
* Signifiers
* Filters
* Payloads???
## Milestone: Minimal notifications support
* Some way to store notification settings locally, and run a command which shows
a sequence of events since the last time you ran it.
* The command should keep a history of all of its outputs, and allow the
user to see that history (in case they run the command, then clear the
output by accident).
* The user should be able to specifically get notifications on threads
they're a part of, threads by branch name pattern, files by path pattern,
and keywords in commit messages.
# Misc Polish
These tasks aren't necessarily scheduled for any particular milestone, but they
are things that could use doing anyway.
* Config validation. Every interface used by the config should have a
`Validate() error` method, and Config itself should as well.
* Maybe coalesce the `accessctl`, `fs`, and `sigcred` packages back into the
root "dehub" package.
* Polish commands
* New flag system, some kind of interactivity support (e.g. user doesn't
specify required argument, give them a prompt on the CLI to input it
rather than an error). This is partially done, in that a new flag system
has been started. Needs further work.
* Review flags:
* probably make some of them into positional arguments
* add flag shortcuts
* document everything better.
* POSIX compatible-ish flags?
* Possibly save state locally in order to speed things along, such as
"account id" which probably isn't going to change often for a user.

View File

@ -1,501 +0,0 @@
# SPEC
This document describes the dehub protocol.
This document assumes that the reader is familiar with git, both conceptually
and in practical use of the git tool. All references to a git-specific concept
retain their meaning; dehub concepts build upon git concepts, but do not
override them.
## Project {#project}
A dehub project is comprised of:
* A collection of files and directories.
* Meta actions related to those files, e.g. discussion, proposed changes, etc.
* Configuration defining which meta actions are allowed under which
circumstances.
All of these components are housed in a git repository. A dehub project does not
require a central repository location (a "remote"), though it may use one if
desired.
## Commit Payload {#payload}
All commits in a dehub [project](#project) contain a payload. The payload is
encoded into the commit message as a YAML object. Here is the general structure
of a commit message containing a payload:
```
Human readable message head
---
# Three dashes indicate the start of the yaml body.
type: type of the payload # Always required
fingerprint: std-base-64 string # Always required
credentials: [...] # Not required but usually present
type_specific_field_a: valueA
type_specific_field_b: valueB
```
The message head is a human readable description of what is being committed, and
is terminated at the first newline. Everything after the message head must be
valid YAML which encodes the payload.
### Fingerprint {#fingerprint}
Each [payload](#payload) object contains a `fingerprint` field. The fingerprint
is an opaque byte string encoded using standard base-64. The algorithm used to
generate the fingerprint will depend on the payload type, and can be found in
each type's sub-section in this document.
### Credential {#credential}
The `credentials` field is not required, but in practice will be found on almost
every [payload](#payload). The field's value will be an array of credential
objects. Only one credential object is currently supported, `pgp_signature`:
```yaml
type: pgp_signature
# One of these fields is required. If account_id is present, it relates the
# signature to a pgp_public_key signifier defined for that account in the config
# (see the Signifier sub-section). Otherwise, the public key will be included in
# the credential itself as the value of pub_key_body.
account_id: some_user_id # Optional
pub_key_body: inlined ASCII-armored pgp public key
# the ID (pgp fingerprint) of the key used to generate the signature
pub_key_id: XXX
# a signature of the payload's unencoded fingerprint, encoded using standard
# base-64
body: std-base-64 signature
```
### Payload Types {#payload-types}
#### Change Payload {#change-payload}
A change [payload](#payload) encompasses a set of changes to the files in the
project. To construct the change payload one must reference the file tree of the
commit which houses the payload as well as the file tree of its parent commit;
specifically one must take the difference between them.
A change payload looks like this:
```yaml
type: change
fingerprint: std-base-64 string
credentials: [...]
description: |-
The description will generally start with a single line, followed by a long-form body
The description corresponds to the body of a commit message in a "normal"
git repo. It gives a more-or-less long-form explanation of the changes being
made to the project's files.
```
##### Change Payload Fingerprint {#change-payload-fingerprint}
The unencoded [fingerprint](#fingerprint) of a [change payload](#change-payload)
is calculated as follows:
* Concatenate the following:
* A uvarint indicating the number of bytes in the description string.
* The description string.
* A uvarint indicating the number of files changed between this commit and
its parent.
* For each file changed, ordered lexicographically-ascending based on its full
relative path within the git repo:
* A uvarint indicating the length of the full relative path of the file
within the repo, as a string.
* The full relative path of the file within the repo, as a string.
* A little-endian uint32 representing the previous file mode of the file
(or 0 if the file is not present in the parent commit's tree).
* The 20-byte SHA1 hash of the contents of the previous version of the file
(or 20 0 bytes if the file is not present in the parent commit's tree).
* A little-endian uint32 representing the new file mode of the file (or 0
if the file is not present in the current commit's tree).
* The 20-byte SHA1 hash of the contents of the new version of the file (or
20 0 bytes if the file is not present in the current commit's tree).
* Calculate the SHA-256 hash of the concatenation result.
* Prepend a 0 byte to the result of the SHA-256 hash.
This unencoded fingerprint is then standard base-64 encoded, and that is used as
the value of the `fingerprint` field.
#### Comment Payload {#comment-payload}
A comment [payload](#payload) encompasses no file changes, and is used only to
contain a comment made by a single user.
A comment payload looks like this:
```yaml
type: comment
fingerprint: std-base-64 string
credentials: [...]
comment: |-
Hey all, how's it going?
Just wanted to pop by and say howdy.
```
The message head of a comment payload will generally be a truncated form of the
comment itself.
##### Comment Payload Fingerprint {#comment-payload-fingerprint}
The unencoded [fingerprint](#fingerprint) of a [comment
payload](#comment-payload) is calculated as follows:
* Concatenate the following:
* A uvarint indicating the number of bytes in the comment string.
* The comment string.
* Calculate the SHA-256 hash of the concatenation result.
* Prepend a 0 byte to the result of the SHA-256 hash.
This unencoded fingerprint is then standard base-64 encoded, and that is used as
the value of the `fingerprint` field.
#### Credential Payload
A credential [payload](#payload) contains only one or more credentials for an
arbitrary [fingerprint](#fingerprint). Credential payloads can be combined with
other payloads of the same fingerprint to create a new payload with many
credentials.
A credential payload looks like this:
```yaml
type: credential
fingerprint: std-base-64 string
credentials: [...]
# This field is not required, but can be helpful in situations where the
# fingerprint was generated based on multiple change payloads
commits:
- commit hash
- commit hash
- commit hash
# This field is not required, but can be helpful to clarify which description
# was used when generating a change fingerprint.
change_description: blah blah blah
```
## Project Configuration {#project-configuration}
The `.dehub` directory contains all meta information related to the dehub
[project](#project). All files within `.dehub` are tracked by the git repo like
any other files in the project.
### config.yml {#config-yml}
The `.dehub/config.yml` file contains a yaml encoded configuration object:
```yaml
accounts: [...]
access_controls: [...]
```
Both fields are described in their own sub-section below.
#### Account {#account}
An account defines a specific user of a [project](#project). Every account has
an ID; no two accounts within a project may share the same ID.
An account looks like this:
```yaml
id: some_string
signifiers: [...]
```
##### Signifier {#signifier}
A signifier is used to signify that an [account](#account) has taken some
action. The most common use-case is to prove that an account created a
particular [credential](#credential). An account may have more than one
signifier.
Currently there is only one signifier type, `pgp_public_key`:
```yaml
type: pgp_public_key
# Path to ASCII-armored pgp public key, relative to repo root.
path: .dehub/account.asc
```
or
```yaml
type: pgp_public_key
body: inlined ASCII-armored pgp public key
```
#### Access Control {#access-control}
An access control allows or denies a particular commit from becoming a part of
a [project](#project). Each access control has an action (allow or deny) and a
set of filters (filters are described in the next section):
```yaml
action: allow # or deny
filters: [...]
```
When verifying a commit against a project's access controls, each access
control's filters are applied to the commit in the order they appear in the
configuration. The first access control for which all filters match is found,
and its action is taken.
An access control with no filters matches all commits.
##### Filter {#filter}
There are many kinds of [access control](#access-control) filters. Any filter
can be applied to a commit, with no other input, and produce a boolean value.
All filters have a `type` field which indicates their type.
###### Signature Filter {#signature-filter}
A [filter](#filter) of type `signature` asserts that a commit's
[payload](#payload) contains [signature credentials](#credential) with certain
properties. A signature filter must have one of these fields, which define the
set of users or [accounts](#account) whose signatures are applicable.
* `account_ids: [...]` - an array of account IDs, each having been defined in
the accounts section of the [configuration](#config-yml).
* `any_account: true` - matches any account defined in the accounts section of
the configuration.
* `any: true` - matches any signature, whether or not its signifier has been
defined in the configuration.
A `count` field may also be included. Its value may be an absolute number (e.g.
`5`) or it may be a string indicating a percent (e.g. `"50%"`). If not included
it will be assumed to be `1`.
The count indicates how many accounts from the specified set must have a
signature included. If a percent is given then that will be multiplied against
the size of the set (rounded up) to determine the necessary number.
Here are some example signature filters, and explanations for each:
```yaml
# requires that 2 of the 3 specified accounts have a signature credential on
# the commit.
type: signature
account_ids:
- amy
- bill
- colleen
count: 2
```
```yaml
# requires that every account defined in the configuration has a signature
# credential on the commit.
type: signature
any_account: true
count: "100%"
```
```yaml
# requires at least one signature credential, not necessarily from an account.
type: signature
any: true
```
###### Branch Filter {#branch-filter}
A [filter](#filter) of type `branch` matches the commit based on which branch in
the repo it is being or has been committed to. Matching is performed on the
short name of the branch, using globstar pattern matching.
A branch filter can have one or multiple patterns defined. The filter will match
if at least one defined pattern matches the short form of the branch name.
A branch filter with only one pattern can be defined like this:
```yaml
type: branch
pattern: some_branch
```
A branch filter with multiple patterns can be defined like this:
```yaml
type: branch
patterns:
- some_branch
- branch*glob
- amy/**
```
###### Files Changed Filter {#files-changed-filter}
A [filter](#filter) of type `files_changed` matches the commit based on which
files were changed between the tree of the commit's parent and the commit's
tree. Matching is performed on the paths of the changed files, relative to the
repo root.
A files changed filter can have one or multiple patterns defined. The filter
will match if any of the changed files matches at least one defined pattern.
A files changed filter with only one pattern can be defined like this:
```yaml
type: files_changed
pattern: .dehub/*
```
A files changed filter with multiple patterns can be defined like this:
```yaml
type: files_changed
patterns:
- some/dir/*
- foo_files_*
- **.jpg
```
###### Payload Type Filter {#payload-type-filter}
A [filter](#filter) of type `payload_type` matches a commit based on the type of
its [payload](#payload). A payload type filter can have one or more types
defined. The filter will match if the commit's payload type matches at least one
of the defined types.
A payload type filter with only one matching type can be defined like this:
```yaml
type: payload_type
payload_type: comment
```
A payload type filter with multiple matching types can be defined like this:
```yaml
type: payload_type
payload_types:
- comment
- change
```
###### Commit Attributes Filter {#commit-attributes-filter}
A [filter](#filter) of type `commit_attributes` matches a commit based on
certain attributes it has. A commit attributes filter may have one or more
fields defined, each corresponding to a different attribute the commit may have.
If more than one field is defined then all corresponding attributes on the
commit must match for the filter to match.
Currently the only possible attribute is `non_fast_forward: true`, which matches
a commit which is not a descendant of the HEAD of the branch it's being pushed
onto. This attribute only makes sense in the context of a pre-receive git hook.
A commit attributes filter looks like this:
```yaml
type: commit_attributes
non_fast_forward: true
```
###### Not Filter {#not-filter}
A [filter](#filter) of type `not` matches a commit using the negation of a
sub-filter, defined within the not filter. If the sub-filter returns true for
the commit, then the not filter returns false, and vice-versa.
A not filter looks like this:
```
type: not
filter:
# a branch filter is used as the sub-filter in this example
type: branch
pattern: main
```
##### Default Access Controls {#default-access-controls}
These [access controls](#access-control) will be implicitly appended to the list
defined in the [configuration](#config-yml):
```yaml
# Any account may add any commit to any non-main branch, provided there is at
# least one signature credential. This includes non-fast-forwards.
- action: allow
filters:
- type: not
filter:
type: branch
pattern: main
- type: signature
any_account: true
count: 1
# Non-fast-forwards are denied in all other cases. In effect, one cannot
# force-push onto the main branch.
- action: deny
filters:
- type: commit_attributes
non_fast_forward: true
# Any account may add any change commit to the main branch, provided there is
# at least one signature credential.
- action: allow
filters:
- type: branch
pattern: main
- type: payload_type
payload_type: change
- type: signature
any_account: true
count: 1
# All other actions are denied.
- action: deny
```
These default access controls provide a useful baseline of requirements that all
[projects](#project) will (hopefully) find useful in their infancy.
## Commit Verification {#commit-verification}
The dehub protocol is designed such that every commit is "verifiable". A
verifiable commit has the following properties:
* Its [fingerprint](#fingerprint) is correctly formed.
* All of its [credentials](#credential) are correctly formed.
* If they are signatures, they are valid signatures of the commit's
unencoded fingerprint.
* The project's [access controls](#access-control) allow the commit.
The [project's configuration](#config-yml) is referenced frequently when
verifying a commit, such as when determining which access controls to apply and
discovering [signifiers](#signifier) of [accounts](#account). In all cases the
configuration as defined in the commit's _parent_ is used when verifying that
commit. The exception is the [prime commit](#prime-commit), which uses its own
configuration.
### Prime Commit {#prime-commit}
The prime commit is the trusted seed of the [project](#project). When a user
clones and verifies a dehub project they must, implicitly or explicitly, trust
the contents of the prime commit. All other commits must be descendants of the
prime commit.
Manually specifying a prime commit is not currently spec'd, but it will be.
By default the prime commit is the root commit of the `main` branch.

View File

@ -1,128 +0,0 @@
# Tutorial 0: Say Hello!
This tutorial will guide you through cloning a dehub project locally, creating a
comment, and pushing that comment back up to the remote. The project in
question: dehub itself!
This tutorial assumes you have [dehub installed](/index.html#getting-started),
you have git and gpg installed, and you have a gpg key already created.
## Step 0: Clone the Project
Cloning the dehub project is as simple as cloning its git repo:
```
git clone https://dehub.dev/src/dehub.git
cd dehub
```
Once cloned, feel free to look around the project. You should initially find
yourself on the `main` branch, the primary branch of most dehub projects
(analogous to the `master` branch of most git repos).
Calling `git log` will show the commit messages for all commits in the branch.
You will notice the commit messages aren't formatted in the familiar way, for
example:
```
commit 351048e9aabef7dc0f99b00f02547e409859a33f
Author: mediocregopher <>
Date: Sat Apr 25 15:17:21 2020 -0600
Completely rewrite SPEC
---
type: change
description: |-
Completely rewrite SPEC
It's good this time, and complete. After this rewrite it will be necessary to
update a lot of the code, since quite a few things got renamed.
fingerprint: AG0s3yILU+0uIZltVY7A9/cgxr/pXk2MzGwExsY/hbIc
credentials:
- type: pgp_signature
pub_key_id: 95C46FA6A41148AC
body: BIG LONG STRING
account: mediocregopher
```
Instead of just being a human-readable description they are YAML encoded payload
objects. We will dive into these payload objects more throughout this tutorial
series.
## Step 1: Checkout the Welcome Branch
Next you're going to checkout the public welcome branch. This is done through a
normal git checkout command:
```
git checkout public/welcome
```
You can do `git log` to see all the comments people have been leaving in this
branch. The `public/welcome` branch is differentiated from the `main` branch in
two ways:
* It has been configured to allow comment commits from anonymous users to be
pushed to it. Project configuration is covered in a future tutorial.
* It has no code files tracked, its only purpose is for comments.
## Step 2: Create Your Comment
Now that you've poked around the welcome branch a bit, it's time to leave a
comment of your own! This is as easy as doing:
```
dehub commit --anon-pgp-key=KEY_NAME comment
```
(`KEY_NAME` should be replaced with any selector which will match your pgp key,
such as the key ID, the name on the key, or the email.)
Your default text editor (defined by the EDITOR environment variable) will pop
up and you can then write down your comment. When you save and close your editor
dehub will sign the comment with your pgp key and create a commit with it.
If you're having trouble thinking of something to say, here's some prompts to
get you going:
* Introduce yourself; say where you're from and what your interests are.
* How did you find dehub? Why is it interesting to you?
* If you're using dehub for a project, shill your project!
* If you'd like to get involved in dehub's development, let us know what your
skills are and how you can help. Remember, it takes more than expert
programmers to make a project successful.
Once you've created your commit you can call `git log` to verify that it's been
created to your liking. If there's anything about the comment you'd like to
change you can amend the commit like so:
```
dehub commit --anon-pgp-key=KEY_NAME comment --amend
```
## Step 3: Push Your Commit
As of now your comment commit only exists on your local machine. For everyone
else to see it you'll need to push it to the dehub server, exactly like with a
normal git commit. Pushing is done in the same way as in a normal git repo:
`git push`.
If you receive an error that's like `Updates were rejected because the tip of
your current branch is behind` then someone else has pushed to the branch in
between the last time you pulled and now. Do a `git pull --rebase` to pull in
those new changes, and try pushing again.
## Step 4: Follow the Conversation
In order to see other people's responses to your comment, and all other parts of
the conversation, all you need to do is call `git pull` with the
`public/welcome` branch checked out.
You now have all the tools needed to participate in a dehub discussion thread!
Continue on to [Tutorial 1](tut1.html) to set up your own dehub project and
learn about credentials and their verification.

View File

@ -1,178 +0,0 @@
# Tutorial 1: Create Your Own Project
This tutorial will guide you through starting a dehub project of your own, as
well as introducing some basic concepts regarding how commit payloads work. You
will use an example hello world project to do this.
This tutorial assumes you have already completed [Tutorial 0](tut0.html).
## Step 0: Init the Project
A dehub project is initialized in the same way as a git project. An empty
directory is created, and `dehub init` is run within that directory.
```
mkdir hello-world
cd hello-world
dehub init
```
`dehub init` does nearly exactly the same thing as `git init`, with the primary
difference being that it sets the initial branch to be `main` instead of
`master`. dehub makes a distinction between `main` and `master` in order to help
prevent confusion between dehub and vanilla git projects, as well as to avoid
conflicts when migrating vanilla git projects to dehub.
## Step 1: Add the First Account
A dehub project is not fully initialized until it has an account defined for it.
dehub accounts refer to a specific user who has some kind of access to the
project. Each account can have specific permissions for it, as well as multiple
ways of signifying itself.
For now, you'll add a basic account `tut` with a pgp key signifier. First,
create the `.dehub` directory, which is where all dehub project configuration
goes, and put your pgp key there:
```
mkdir .dehub
gpg -a --export KEY_ID > .dehub/tut.asc
```
Next you'll create the `.dehub/config.yml` file, which is where accounts are
actually defined (amongst many other things). The file should have the following
contents:
```yaml
# contents of .dehub/config.yml
---
accounts:
- id: tut
signifiers:
- type: pgp_public_key_file
path: ".dehub/tut.asc"
```
Finally, you'll commit these changes and the project will have its first commit!
Committing changes works very similarly to committing comments (as you did in
[Tutorial 0](tut0.html)). Where a comment commit merely carries a user's
comment, a change commit describes a set of changes to the tracked files in the
git repo.
```
git add --all
dehub commit --as tut change
```
Like when you made a comment commit, this will pop up with your editor asking
for a description of the changes. Fill it in with something like `Initialize the
project` and save+close the editor. Depending on your pgp key settings you'll
likely be prompted for your pgp key password at this point. After that the
commit has been created!
## Step 2: Inspect the Payload
In this step you're going to look at the commit you just created and learn about
the contents of the payload. To view the commit do `git show`. Something similar
to the following should be output as the commit message:
```
commit 3cdcbc19546d4e6d817ebfba3e18afbc23283ec0
Author: username <>
Date: Sat Apr 25 15:17:21 2020 -0600
Initialize the project
---
type: change
description: Initialize the project
fingerprint: AG0s3yILU+0uIZltVY7A9/cgxr/pXk2MzGwExsY/hbIc
credentials:
- type: pgp_signature
pub_key_id: 95C46FA6A41148AC
body: BIG LONG STRING
account: tut
```
All commits in a dehub project will contain a similar looking message. The first
line (the head) is always a human readable description of the commit. In this
case our commit description itself, `Initialize the project`, was used.
After the head comes the payload, which is always a YAML encoded object. All
payloads have a `type` field indicating what type of payload they are. That type
will determine what other fields the payload is expected to have. The other
fields in this payload object are:
* `description`: This is the description which was input into the editor when
creating the change commit.
* `fingerprint`: A unique descriptor for this set of changes. It is computed
using both `description` and the files changed.
* `credentials`: A set of credentials for this commit, each one declaring
that this commit has been given approval by a user. This commit has one
`pgp_signature` credential, created by the `tut` account. The `body` is a
signature of the `fingerprint` created by the `tut`'s pgp key.
## Step 3: Create Another Commit
Now that the initial commit is created, and configuration has been added to the
dehub project, you can continue on to use the project for what it was intended
for: greeting the world!
Add a simple "hello world" script to the project by doing:
```
echo 'echo "hello world"' > hello.sh
git add hello.sh
dehub commit --as tut change --descr 'add hello.sh'
```
You'll notice that this time around you used the `--descr` flag to declare the
change's description, rather than opening up the editor.
Once again you can inspect the payload you just created using `git show`, if
you'd like, or continue on to the next step to learn about commit verification.
## Step 4: Verify Your Commits
All this work to create YAML encoded payloads has been done for one primary
purpose: to make commits verifiable. A verifiable commit is one which follows
the access controls defined by its parent.
Your dehub project doesn't have any explicitly defined access controls (that
will be covered in a future tutorial), and so the defaults are used. By default,
dehub requires that all commits in `main` are change commits which have been
signed by at least one account.
In order to verify the HEAD commit you can do:
```
dehub verify
```
This command looks at the project configuration defined in the parent of HEAD
and verifies that HEAD conforms to it. The HEAD of your project is a change
commit signed by the account `tut`, and so should be verifiable.
Arbitrary commits can be verified using the `--rev` flag. This command will
verify the parent of HEAD, i.e. the initial commit:
```
dehub verify --rev HEAD^
```
The initial commit doesn't have a parent, and so is a special case for
verification. The initial commit uses the configuration defined within itself in
order to verify itself. This creates an exploit opportunity: if you clone a
remote dehub project and an attacker intercepts that request they will be able
to send you back a project with a different initial commit than what you
expected. The whole project will still be verifiable, even though it's been
compromised. For this reason it's important to manually verify that the initial
commit of projects you clone are configured correctly, using the expected
signifiers for the expected accounts.
You are now able to initialize a project, configure accounts within it, commit
changes to its files, and verify those commits. Well done! Continue on to
[Tutorial 2](tut2.html), where you will learn how to configure dehub's access
controls.

View File

@ -1,262 +0,0 @@
# Tutorial 2: Access Controls
Access controls, in the context of a dehub project, refer to configuration
defining who is allowed to do what. These controls are defined within the dehub
project itself, within the `.dehub/config.yml` file. This tutorial will guide
you through the basics of how access controls work, how to define them, and some
examples of what can be done with them.
This tutorial assumes you have already completed [Tutorial 1](tut1.html), and
builds on top of the project which was started there.
## Step 0: Create a Restricted Account
Inside the project you started in [Tutorial 1](tut1.html) you're going to add
another account to the project, called `tot`. Initially, `tot` will have all the
same permissions as `tut`, except being allowed to modify the project
configuration.
First, export your gpg key into the project for `tot` to use, the same key used
for `tut`:
```
gpg -a --export KEY_ID > .dehub/tot.asc
```
(For the purposes of a tutorial it's fine for two accounts to share a
key, but it's not something which generally makes sense to do.)
Now, modify the `.dehub/config.yml` to have the following contents:
```yaml
# contents of .dehub/config.yml
---
accounts:
- id: tut
signifiers:
- type: pgp_public_key_file
path: ".dehub/tut.asc"
- id: tot
signifiers:
- type: pgp_public_key_file
path: ".dehub/tot.asc"
access_controls:
- action: allow
filters:
- type: signature
account_ids:
- tut
- type: files_changed
pattern: .dehub/*
- action: deny
filters:
- type: files_changed
pattern: .dehub/*
```
The `accounts` section has been modified to add the `tot` account, but the
primary change here has been to add the `access_controls` section. The next
sub-sections will explain what exactly is being done here, but for now go ahead
and commit these changes:
```
git add --all
dehub commit --as tut change --descr 'add new restricted tot account'
```
### Access Controls
Each access control is an action+filters pair. For any commit being verified,
the access controls defined in its parent commit are iterated through, in order,
until one is found whose filters all match the commit being verified. The action
for that access control, either `allow` or `deny`, is then taken.
If no access controls are defined, or none match, then the default access
controls are used. These are explicitly defined in the
[SPEC](SPEC.html#default-access-controls), but the general effect of them is to
require that all commits have one signature from any of the project's accounts.
### Access Control Filters
There are many different filter types, so only the ones used in the tutorial
will be explained. An exhaustive listing can be found in the
[SPEC](SPEC.html#filter).
The `signature` filter matches commits which have a signature credential created
by any one of the specified accounts. The `files_changed` filter matches commits
which have changed files whose paths match the specified patterns (relative to
the project's root).
### Putting it Together
The first of the new access controls you've defined is:
```
- action: allow
filters:
- type: signature
account_ids:
- tut
- type: files_changed
pattern: .dehub/*
```
This allows any commits which have been signed by `tut` and which modify any of
the files in `.dehub/*`. The second access control is:
```
- action: deny
filters:
- type: files_changed
pattern: .dehub/*
```
This denies any commits which modify any of the files in `.dehub/*`. If a commit
does not match the first access control, but does match this second access
control, it can be assumed that the commit does _not_ have a signature from
`tut` (because that's the only difference between them). Therefore, the effect
of these two controls put together is to only allow `tut` to make changes to the
`.dehub` directory's files.
## Step 1: Test the Restrictions
Let's say that your new user `tot` is having a bit of rebellious phase, and
wants to kick `tut` out of the project. Change `.dehub/config.yml` to have the
following contents (note that `accounts` has been left the same and so is mostly
elided):
```
# abbreviated contents of .dehub/config.yml
---
accounts:
...
access_controls:
- action: deny
filters:
- type: signature
account_ids:
- tut
```
So edgy. Make the commit for `tot`, being sure that the value for the `--as`
flag indicates you're committing _as_ `tot`:
```
git add --all
dehub commit --as tot change --descr 'tut is a butt'
```
Somewhat unexpectedly, the commit has been created! You can see it by doing `git
show`. This shouldn't be possible though, because the previous commit disallowed
anyone but `tut` from changing files within the `.dehub/` directory. Is dehub
broken?
The fact is that, regardless of whether or not the `dehub` tool allows one to
create this commit, `tot` can create this commit. The important thing is that
`tut` is able to notice that it's been created and do something about it. In a
real-world situation, both `tot` and `tut` would be using different computers,
and when `tut` (or anyone else) receives the commit from `tot` they will try to
verify it, fail to do so, and ignore it.
If you perform `dehub verify` you will be greeted with the following error:
```
exiting: blah blah blah: commit matched and denied by this access control:
action: deny
filters:
- type: files_changed
pattern: .dehub/*
```
Because the parent of this commit's config disallows this commit (via the given
access control) it is not verifiable. Go ahead and delete the commit by doing:
```
git reset --hard "$(git rev-list HEAD | tail -3 | head -n1)"
```
## Step 2: Different Restrictions
In light of `tot`'s recent actions it might be prudent to pull back their
permissions a bit. Go ahead and change the `.dehub/config.yml` to:
```
# abbreviated contents of .dehub/config.yml
---
accounts:
...
access_controls:
- action: allow
filters:
- type: signature
account_ids:
- tot
- type: branch
pattern: tot/*
- action: deny
filters:
- type: signature
account_ids:
- tot
```
and commit the change:
```
git add --all
dehub commit --as tut change --descr 'restrict tot to non-main branches'
```
After this, `tot` will still be able to interact with the project, but only
within branches whose names have the prefix `tot/`; the `main` branch remains
open to other accounts, such as `tut`, due to the default access controls.
### Check the New Restrictions
`tot` has decided to do something constructive and wants to make a shell script
which wraps the `echo` command. So helpful. Make a new branch for `tot` to use,
and create a commit on it:
```
git checkout -b tot/echo-script
echo 'echo "$@"' > echo.sh
git add echo.sh
dehub commit --as tot change --descr "added echo.sh script"
```
Check that the commit verifies (it should, since it's on a branch with the
prefix `tot/`):
```
dehub verify
```
Now, as a final sanity check, you'll cherry-pick the commit onto `main` and
ensure that it does _not_ verify there.
```
git checkout main
git cherry-pick tot/echo-script
```
Running `dehub verify` now should fail, even though the commit remains the same.
The only difference is the branch name; the commit is allowed in branches with
the prefix `tot/`, and disallowed otherwise.
Finally, reverse that cherry-pick to make `main` verifiable again:
```
git reset --hard "$(git rev-list HEAD | tail -4 | head -n1)"
```
You now have an understanding of how dehub's access controls work. Access
controls are extremely flexible and can be formulated to fit a wide-variety of
use-cases. In [Tutorial 3](tut3.html) we'll see how access controls can be
formulated to allow for commit sign-offs, where multiple accounts must accredit
a commit before it can be verified, and how such a commit can be created.

View File

@ -1,246 +0,0 @@
# Tutorial 3: Commit Sign-Off
Commit sign-off is a common pattern in vanilla git projects, where a commit must
be approved by one or more people (besides the commit author themselves) in
order to be allowed into the primary branch.
dehub is able to accomplish this same pattern using only the access controls
which have already been covered in this tutorial series and a command which has
not: `dehub combine`. This tutorial will guide you through using `dehub combine`
to facilitate commit sign-off.
This tutorial assumes you have already completed [Tutorial 2](tut2.html), and
builds on top of the project which was started there.
## Step 0: Loosen the Previous Restrictions
In the [previous tutorial](tut2.html) you took an existing project, added a new
user `tot` to it, and then restricted `tot` to only be allowed to make commits
in a certain subset of branches which excluded the `main` branch.
As seen in that tutorial, `tot` is not able to create commits for the `main`
branch _at all_. In this tutorial we're going to open `main` back up to `tot`,
but only with a very important caveat: `tot`'s commits must be approved by
someone else.
In the `hello-world` project which was used for previous tutorials, with the
`main` branch checked out, go ahead and modify `.dehub/config.yml` to have the
following contents:
```
# contents of .dehub/config.yml
---
accounts:
- id: tut
signifiers:
- type: pgp_public_key_file
path: ".dehub/tut.asc"
- id: tot
signifiers:
- type: pgp_public_key_file
path: ".dehub/tot.asc"
access_controls:
- action: allow
filters:
- type: signature
account_ids:
- tot
- type: branch
pattern: tot/*
- action: deny
filters:
- type: branch
pattern: main
- type: not
filter:
type: signature
any_account: true
count: 2
```
and commit the changes:
```
git add .dehub/config.yml
dehub commit --as tut change --descr 'require commit sign-offs in main'
```
The primary change was to replace the old access control denying `tot` the
ability to commit to anything (outside of `tot/*` branches) with this one:
```
- action: deny
filters:
- type: branch
pattern: main
- type: not
filter:
type: signature
any_account: true
count: 2
```
There are two new things here. The first is the new fields on the `signature`
filter: `any_account` replaces the `account_ids` field, and refers to any
account which is defined in the `accounts` section; `count` declares how many
accounts must have a signature on the commit for the filter to match (if not
specified it defaults to 1).
The second new thing is the `not` filter: `not` wraps any other filter, and
reverses whether or not it matches. In this case, it's wrapping our `signature`
filter, such that this access control will match only if the commit _does not_
have signature credentials from 2 different accounts.
The total effect of this access control is to deny any commits to `main` which
have not been signed-off by 2 different accounts.
## Step 1: Some Changes to Merge
In the previous tutorial `tot` created a new script, `echo.sh`, in a new branch
called `tot/echo-script`. Check that branch out, rebase it on `main` (this will
help in later steps), and add another script to it:
```
git checkout tot/echo-script
git rebase main
echo 'echo "$@" | awk "{ print toupper(\$0) }"' > echo-upper.sh
git add echo-upper.sh
dehub commit --as tot change --descr 'echo-upper.sh'
```
Now the `tot/echo-script` branch contains two commits which aren't on `main`,
both of them signed by `tot`. What will happen next is that the branch's commits
will be combined into a single commit, be given accreditation by both `tut` and
`tot`, and added to the `main` branch.
## Step 2: Accreditation
First, `tot` will accredit both commits, and unify the two descriptions in the
process. To do this, you will create your first `credential` commit:
```
dehub commit --as tot credential --start HEAD^^ --descr 'add echo.sh and echo-upper.sh'
```
A `credential` commit, at its core, contains nothing except credentials for any
arbitrary fingerprint. To view the credential commit you just made
do: `git show`. You should see a commit message like:
```
Credential of AO3dn4Se61hq6OWy4Lm6m3MxdT2ru6TrIobuHaWJJidt
---
type: credential
commits:
- f085f13fa839ece122476601d970460ac249dc69 # these will be different
- 40a81ffb4f52dc4149570672f7f7fc053f12226a
change_description: add echo.sh and echo-upper.sh
fingerprint: AO3dn4Se61hq6OWy4Lm6m3MxdT2ru6TrIobuHaWJJidt
credentials:
- type: pgp_signature
pub_key_id: XXX
body: BIG LONG STRING
account: tot
```
You'll notice that the credential commit's fingerprint is different from either
of the two commits it accredits. This is because the fingerprint is based on the
_combination_ of the two commits; it is based on the total of the file changes
and the description provided by the user. The two commits are enumerated in the
`commits` field of the payload, and the description provided by the user is
stored in the `change_description` field.
The combined commits have now been accredited by `tot`, but not `tut`, and so
they still lack a necessary credential. Have `tut` make a credential now:
```
dehub commit --as tut credential --rev HEAD
```
This form of the `credential` sub-command only accredits a single commit. When a
single commit is accredited and it itself is a credential commit then the new
commit which is created is merely a copy of the specified credential commit with
the caller's own credential appended to the `credentials` list. You can see this
with `git show`, which should look like:
```
Credential of AO3dn4Se61hq6OWy4Lm6m3MxdT2ru6TrIobuHaWJJidt
---
type: credential
commits:
- f085f13fa839ece122476601d970460ac249dc69 # these will be different
- 40a81ffb4f52dc4149570672f7f7fc053f12226a
change_description: add echo.sh and echo-upper.sh
fingerprint: AO3dn4Se61hq6OWy4Lm6m3MxdT2ru6TrIobuHaWJJidt
credentials:
- type: pgp_signature
pub_key_id: XXX
body: BIG LONG STRING
account: tot
- type: pgp_signature
pub_key_id: XXX
body: BIG LONG STRING
account: tut
```
There are now enough credentials to combine the commits in the `tot/echo-script`
branch into a single commit on the `main` branch.
## Step 3: Combination
At this point the `tot/echo-script` branch has the following elements in place:
* Two change commits, which we want to combine and bring over to `main`.
* A credential commit made by `tot` for the combined changes.
* A credential commit made by `tut` for the combined changes, which includes
`tot`'s credentials.
Combining the commits and placing them on `main` is done with a single command:
```
dehub combine --start HEAD^^^^ --end HEAD --onto main
```
This `combine` command combines all changes made within the given commit range,
the last change description found in that range (in this case it will be from
`tut`'s credential commit), and all credentials for that set of changes. The
command combines them into a single commit which it places on the `main` branch.
You can see the commit you've just created by doing:
```
git checkout main
git show
```
The commit should contain both of the new files, and the message should look
something like:
```
add echo.sh and echo-upper.sh
---
type: change
description: add echo.sh and echo-upper.sh
fingerprint: ALOcEuKJkgIdz27z0fjF1NEbK6Y9cEh2RH4/sL3uf3oa
credentials:
- type: pgp_signature
pub_key_id: XXX
body: BIG LONG BODY
account: tot
- type: pgp_signature
pub_key_id: XXX
body: BIG LONG BODY
account: tut
```
The commit is accredited by two different accounts, and so is allowed to be on
the `main` branch. This can be verified by doing `dehub verify`.
You now are able to require commit sign-off and create signed-off commits! The
access control settings surrounding commit sign-offs are entirely up to you and
your project's needs. You can require sign-off from specific accounts, any
accounts, only on specific files, only in certain branches, etc... all using the
same basic access control building blocks.

View File

@ -1,83 +0,0 @@
package dehub
import (
"crypto/sha256"
"encoding/binary"
"fmt"
"hash"
"sort"
)
var (
	// defaultHashHelperAlgo is the hash constructor used when the caller
	// does not supply one (currently SHA-256).
	defaultHashHelperAlgo = sha256.New
)

// hashHelper wraps a hash.Hash with helpers for writing length-prefixed
// values into it, used when computing payload fingerprints.
type hashHelper struct {
	hash hash.Hash

	// varintBuf is a scratch buffer reused by writeUint to avoid a
	// per-call allocation.
	varintBuf []byte
}
// newHashHelper returns a hashHelper wrapping the given hash.Hash. If h is
// nil then defaultHashHelperAlgo will be used.
func newHashHelper(h hash.Hash) *hashHelper {
	if h == nil {
		h = defaultHashHelperAlgo()
	}
	return &hashHelper{
		hash:      h,
		varintBuf: make([]byte, binary.MaxVarintLen64),
	}
}
// sum returns prefix followed by the current digest of the wrapped hash.
// The prefix itself is not written into the hash.
func (s *hashHelper) sum(prefix []byte) []byte {
	out := append(make([]byte, 0, len(prefix)+s.hash.Size()), prefix...)
	return s.hash.Sum(out)
}
// writeUint writes i to the wrapped hash as a uvarint. hash.Hash.Write is
// documented to never return an error, so one is treated as a programmer bug.
func (s *hashHelper) writeUint(i uint64) {
	b := s.varintBuf[:binary.PutUvarint(s.varintBuf, i)]
	if _, err := s.hash.Write(b); err != nil {
		panic(fmt.Sprintf("error writing %x to %T: %v", b, s.hash, err))
	}
}
// writeStr writes str to the wrapped hash, prefixed with its byte length so
// that distinct sequences of strings can never collide.
func (s *hashHelper) writeStr(str string) {
	s.writeUint(uint64(len(str)))
	// Check the write error for consistency with writeUint; hash.Hash.Write
	// is documented to never return an error, so this should be unreachable.
	if _, err := s.hash.Write([]byte(str)); err != nil {
		panic(fmt.Sprintf("error writing %q to %T: %v", str, s.hash, err))
	}
}
// writeChangedFiles writes the set of file changes into the wrapped hash:
// first a count, then for each file (sorted by path, so the fingerprint is
// deterministic regardless of input order) its path and from/to mode+hash
// pairs.
func (s *hashHelper) writeChangedFiles(changedFiles []ChangedFile) {
	// Sort a copy rather than the argument itself, so this helper has no
	// visible side effect on the caller's slice.
	sorted := make([]ChangedFile, len(changedFiles))
	copy(sorted, changedFiles)
	sort.Slice(sorted, func(i, j int) bool {
		return sorted[i].Path < sorted[j].Path
	})

	s.writeUint(uint64(len(sorted)))
	for _, fileChanged := range sorted {
		s.writeStr(fileChanged.Path)
		s.hash.Write(fileChanged.FromMode.Bytes())
		s.hash.Write(fileChanged.FromHash[:])
		s.hash.Write(fileChanged.ToMode.Bytes())
		s.hash.Write(fileChanged.ToHash[:])
	}
}
var (
	// Version bytes prefixed to each fingerprint kind, allowing the hashing
	// scheme to evolve without old and new fingerprints being ambiguous.
	changeHashVersion  = []byte{0}
	commentHashVersion = []byte{0}
)
// genChangeFingerprint computes the fingerprint of a change from its message
// and set of file changes. If h is nil then defaultHashHelperAlgo will be
// used.
func genChangeFingerprint(h hash.Hash, msg string, changedFiles []ChangedFile) []byte {
	helper := newHashHelper(h)
	helper.writeStr(msg)
	helper.writeChangedFiles(changedFiles)
	return helper.sum(changeHashVersion)
}
// genCommentFingerprint computes the fingerprint of a comment from its text.
// If h is nil then defaultHashHelperAlgo will be used.
func genCommentFingerprint(h hash.Hash, comment string) []byte {
	helper := newHashHelper(h)
	helper.writeStr(comment)
	return helper.sum(commentHashVersion)
}

View File

@ -1,237 +0,0 @@
package dehub
import (
"bytes"
"encoding/binary"
"hash"
"testing"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
)
// testHash is a fake hash.Hash which simply records every byte written to it,
// so tests can assert on the exact byte stream fed into a fingerprint.
type testHash struct {
	bytes.Buffer
}

// compile-time check that testHash satisfies hash.Hash
var _ hash.Hash = new(testHash)

// Sum appends the recorded bytes to b, rather than any real digest.
func (th *testHash) Sum(b []byte) []byte {
	return append(b, th.Buffer.Bytes()...)
}

func (th *testHash) Size() int {
	return th.Buffer.Len()
}

func (th *testHash) BlockSize() int {
	return 1
}

// assertContents asserts that the bytes written to th are exactly the given
// parts concatenated in order, with nothing left over.
func (th *testHash) assertContents(t *testing.T, parts [][]byte) {
	b := th.Buffer.Bytes()
	for _, part := range parts {
		if len(part) > len(b) || !bytes.Equal(part, b[:len(part)]) {
			t.Fatalf("expected %q but only found %q", part, b)
		}
		// consume the matched prefix and continue with the remainder
		b = b[len(part):]
	}
	if len(b) != 0 {
		t.Fatalf("unexpected extra bytes written to testHash: %q", b)
	}
}
// uvarint returns the varint encoding of i, exactly as binary.PutUvarint
// would produce it.
func uvarint(i uint64) []byte {
	b := make([]byte, binary.MaxVarintLen64)
	return b[:binary.PutUvarint(b, i)]
}
// TestGenCommentFingerprint asserts the exact byte stream hashed for a
// comment: a uvarint byte-length prefix followed by the comment bytes.
func TestGenCommentFingerprint(t *testing.T) {
	type test struct {
		descr   string
		comment string
		exp     [][]byte
	}

	tests := []test{
		{
			descr:   "empty comment",
			comment: "",
			exp:     [][]byte{uvarint(0)},
		},
		{
			descr:   "normal comment",
			comment: "this is a normal comment",
			exp:     [][]byte{uvarint(24), []byte("this is a normal comment")},
		},
		{
			// the length prefix counts bytes, not runes ("⚡" is 3 bytes)
			descr:   "comment with unicode",
			comment: "sick comment ⚡",
			exp:     [][]byte{uvarint(16), []byte("sick comment ⚡")},
		},
	}

	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			th := new(testHash)
			genCommentFingerprint(th, test.comment)
			th.assertContents(t, test.exp)
		})
	}
}

// TestGenChangeFingerprint asserts the exact byte stream hashed for a change:
// the length-prefixed message, a count of changed files, then for each file
// (sorted by path) its path and its from/to mode+hash pairs. Added files have
// an empty "from" side, removed files an empty "to" side.
func TestGenChangeFingerprint(t *testing.T) {
	type test struct {
		descr        string
		msg          string
		changedFiles []ChangedFile
		exp          [][]byte
	}

	// hash returns a plumbing.Hash whose first byte is i; hashB returns the
	// same thing as a plain byte slice, for use in expectations.
	hash := func(i byte) plumbing.Hash {
		var h plumbing.Hash
		h[0] = i
		return h
	}

	hashB := func(i byte) []byte {
		h := hash(i)
		return h[:]
	}

	tests := []test{
		{
			descr:        "empty",
			msg:          "",
			changedFiles: nil,
			exp:          [][]byte{uvarint(0), uvarint(0)},
		},
		{
			descr:        "empty changes",
			msg:          "some msg",
			changedFiles: nil,
			exp:          [][]byte{uvarint(8), []byte("some msg"), uvarint(0)},
		},
		{
			descr: "empty msg",
			msg:   "",
			changedFiles: []ChangedFile{{
				Path:   "foo",
				ToMode: filemode.Regular, ToHash: hash(1),
			}},
			exp: [][]byte{uvarint(0), uvarint(1),
				uvarint(3), []byte("foo"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Regular.Bytes(), hashB(1)},
		},
		{
			descr: "files added",
			msg:   "a",
			changedFiles: []ChangedFile{
				{
					Path:   "foo",
					ToMode: filemode.Regular, ToHash: hash(1),
				},
				{
					Path:   "somedir/bar",
					ToMode: filemode.Executable, ToHash: hash(2),
				},
			},
			exp: [][]byte{uvarint(1), []byte("a"), uvarint(2),
				uvarint(3), []byte("foo"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Regular.Bytes(), hashB(1),
				uvarint(11), []byte("somedir/bar"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Executable.Bytes(), hashB(2),
			},
		},
		{
			// same as above, but with input order reversed; the expected
			// stream is unchanged because files are sorted by path.
			descr: "files added (unordered)",
			msg:   "a",
			changedFiles: []ChangedFile{
				{
					Path:   "somedir/bar",
					ToMode: filemode.Executable, ToHash: hash(2),
				},
				{
					Path:   "foo",
					ToMode: filemode.Regular, ToHash: hash(1),
				},
			},
			exp: [][]byte{uvarint(1), []byte("a"), uvarint(2),
				uvarint(3), []byte("foo"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Regular.Bytes(), hashB(1),
				uvarint(11), []byte("somedir/bar"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Executable.Bytes(), hashB(2),
			},
		},
		{
			descr: "file modified",
			msg:   "a",
			changedFiles: []ChangedFile{{
				Path:     "foo",
				FromMode: filemode.Regular, FromHash: hash(1),
				ToMode: filemode.Executable, ToHash: hash(2),
			}},
			exp: [][]byte{uvarint(1), []byte("a"), uvarint(1),
				uvarint(3), []byte("foo"),
				filemode.Regular.Bytes(), hashB(1),
				filemode.Executable.Bytes(), hashB(2),
			},
		},
		{
			descr: "file removed",
			msg:   "a",
			changedFiles: []ChangedFile{{
				Path:     "foo",
				FromMode: filemode.Regular, FromHash: hash(1),
			}},
			exp: [][]byte{uvarint(1), []byte("a"), uvarint(1),
				uvarint(3), []byte("foo"),
				filemode.Regular.Bytes(), hashB(1),
				filemode.Empty.Bytes(), hashB(0),
			},
		},
		{
			descr: "files added, modified, and removed",
			msg:   "aaa",
			changedFiles: []ChangedFile{
				{
					Path:   "foo",
					ToMode: filemode.Regular, ToHash: hash(1),
				},
				{
					Path:     "bar",
					FromMode: filemode.Regular, FromHash: hash(2),
					ToMode: filemode.Regular, ToHash: hash(3),
				},
				{
					Path:     "baz",
					FromMode: filemode.Executable, FromHash: hash(4),
				},
			},
			exp: [][]byte{uvarint(3), []byte("aaa"), uvarint(3),
				uvarint(3), []byte("bar"),
				filemode.Regular.Bytes(), hashB(2),
				filemode.Regular.Bytes(), hashB(3),
				uvarint(3), []byte("baz"),
				filemode.Executable.Bytes(), hashB(4),
				filemode.Empty.Bytes(), hashB(0),
				uvarint(3), []byte("foo"),
				filemode.Empty.Bytes(), hashB(0),
				filemode.Regular.Bytes(), hashB(1),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			th := new(testHash)
			genChangeFingerprint(th, test.msg, test.changedFiles)
			th.assertContents(t, test.exp)
		})
	}
}

View File

@ -1,117 +0,0 @@
package fs
import (
"path"
"sort"
"strings"
"gopkg.in/src-d/go-billy.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
"gopkg.in/src-d/go-git.v4/plumbing/format/index"
"gopkg.in/src-d/go-git.v4/plumbing/object"
"gopkg.in/src-d/go-git.v4/storage"
)
// This file is largely copied from the go-git project's worktree_commit.go @ v4.13.1

// buildTreeHelper converts a given index.Index file into multiple git objects
// reading the blobs from the given filesystem and creating the trees from the
// index structure. The created objects are pushed to a given Storer.
type buildTreeHelper struct {
	fs billy.Filesystem
	s  storage.Storer

	// trees maps a directory path to its in-progress tree object; entries
	// maps a path to the TreeEntry created for it (used for deduplication).
	trees   map[string]*object.Tree
	entries map[string]*object.TreeEntry
}
// BuildTree builds the tree objects and pushes them to the storer; the hash
// of the root tree is returned.
func (h *buildTreeHelper) BuildTree(idx *index.Index) (plumbing.Hash, error) {
	// The root tree is keyed by the empty path.
	const rootNode = ""
	h.trees = map[string]*object.Tree{rootNode: {}}
	h.entries = map[string]*object.TreeEntry{}

	for _, e := range idx.Entries {
		if err := h.commitIndexEntry(e); err != nil {
			return plumbing.ZeroHash, err
		}
	}

	return h.copyTreeToStorageRecursive(rootNode, h.trees[rootNode])
}
// commitIndexEntry walks each path segment of the entry's name, ensuring a
// tree node exists for every intermediate directory before recording the
// entry itself.
func (h *buildTreeHelper) commitIndexEntry(e *index.Entry) error {
	parts := strings.Split(e.Name, "/")

	var fullpath string
	for _, part := range parts {
		parent := fullpath
		fullpath = path.Join(fullpath, part)

		h.doBuildTree(e, parent, fullpath)
	}

	return nil
}

// doBuildTree records a single TreeEntry under parent for fullpath. If
// fullpath names the index entry itself it receives the entry's mode and
// hash; otherwise it is an intermediate directory and an empty tree is
// created for it. Paths which were already processed are skipped.
func (h *buildTreeHelper) doBuildTree(e *index.Entry, parent, fullpath string) {
	if _, ok := h.trees[fullpath]; ok {
		return
	}

	if _, ok := h.entries[fullpath]; ok {
		return
	}

	te := object.TreeEntry{Name: path.Base(fullpath)}

	if fullpath == e.Name {
		te.Mode = e.Mode
		te.Hash = e.Hash
	} else {
		te.Mode = filemode.Dir
		h.trees[fullpath] = &object.Tree{}
	}

	h.trees[parent].Entries = append(h.trees[parent].Entries, te)
}
// sortableEntries sorts tree entries the way git itself does: directory
// entries sort as if their name carried a trailing "/".
type sortableEntries []object.TreeEntry

func (sortableEntries) sortName(te object.TreeEntry) string {
	if te.Mode == filemode.Dir {
		return te.Name + "/"
	}
	return te.Name
}

func (se sortableEntries) Len() int               { return len(se) }
func (se sortableEntries) Less(i int, j int) bool { return se.sortName(se[i]) < se.sortName(se[j]) }
func (se sortableEntries) Swap(i int, j int)      { se[i], se[j] = se[j], se[i] }
// copyTreeToStorageRecursive encodes t into the storer, first recursing
// (depth-first) into any subtree entries so their hashes can be filled in,
// and returns the hash of t itself.
func (h *buildTreeHelper) copyTreeToStorageRecursive(parent string, t *object.Tree) (plumbing.Hash, error) {
	sort.Sort(sortableEntries(t.Entries))
	for i, e := range t.Entries {
		// Leaf entries with a known hash need no recursion.
		if e.Mode != filemode.Dir && !e.Hash.IsZero() {
			continue
		}

		// NOTE this local shadows the "path" package for the rest of the loop
		// body.
		path := path.Join(parent, e.Name)

		var err error
		e.Hash, err = h.copyTreeToStorageRecursive(path, h.trees[path])
		if err != nil {
			return plumbing.ZeroHash, err
		}

		// e is a copy; write the updated entry back into the slice.
		t.Entries[i] = e
	}

	o := h.s.NewEncodedObject()
	if err := t.Encode(o); err != nil {
		return plumbing.ZeroHash, err
	}

	return h.s.SetEncodedObject(o)
}

100
fs/fs.go
View File

@ -1,100 +0,0 @@
// Package fs implements abstractions for interacting with a filesystem, either
// via a git tree, a staged index, or directly.
package fs
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"gopkg.in/src-d/go-billy.v4"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)
// FS is a simple interface for reading off a snapshot of a filesystem.
type FS interface {
	// Open returns a reader for the contents of the file at the given path.
	Open(path string) (io.ReadCloser, error)
}

// treeFS adapts a git tree object to the FS interface.
type treeFS struct {
	tree *object.Tree
}

// FromTree wraps a git tree object to implement the FS interface. All paths
// will be relative to the root of the tree.
func FromTree(t *object.Tree) FS {
	return treeFS{tree: t}
}

// Open implements the method for the FS interface.
func (gt treeFS) Open(path string) (io.ReadCloser, error) {
	f, err := gt.tree.File(path)
	if err != nil {
		return nil, err
	}
	return f.Blob.Reader()
}

// billyFS adapts a billy.Filesystem to the FS interface.
type billyFS struct {
	fs billy.Filesystem
}

// FromBillyFilesystem wraps a billy.Filesystem to implement the FS interface.
// All paths will be relative to the filesystem's root.
func FromBillyFilesystem(bfs billy.Filesystem) FS {
	return billyFS{fs: bfs}
}

// Open implements the method for the FS interface.
func (bfs billyFS) Open(path string) (io.ReadCloser, error) {
	return bfs.fs.Open(path)
}
// FromStagedChangesTree processes the current set of staged changes into a tree
// object, and returns an FS for that tree. All paths will be relative to the
// root of the git repo.
func FromStagedChangesTree(repo *git.Repository) (FS, *object.Tree, error) {
	w, err := repo.Worktree()
	if err != nil {
		return nil, nil, fmt.Errorf("could not open git worktree: %w", err)
	}

	storer := repo.Storer
	idx, err := storer.Index()
	if err != nil {
		return nil, nil, fmt.Errorf("could not open git staging index: %w", err)
	}

	// Build tree objects from the index, mirroring what "git commit" would
	// do, so the staged state can be read through the FS interface.
	th := &buildTreeHelper{
		fs: w.Filesystem,
		s:  storer,
	}

	treeHash, err := th.BuildTree(idx)
	if err != nil {
		return nil, nil, fmt.Errorf("could not build staging index tree: %w", err)
	}

	tree, err := repo.TreeObject(treeHash)
	if err != nil {
		return nil, nil, fmt.Errorf("could not get staged tree object (%q): %w", treeHash, err)
	}

	return FromTree(tree), tree, nil
}
// Stub is an in-memory implementation of FS based on a map of paths to the
// file contents at that path. Paths should be "clean" or they will not match
// with anything.
type Stub map[string][]byte

// Open implements the method for the FS interface. It returns os.ErrNotExist
// when the path has no entry in the map.
func (s Stub) Open(path string) (io.ReadCloser, error) {
	if body, ok := s[path]; ok {
		return ioutil.NopCloser(bytes.NewReader(body)), nil
	}
	return nil, os.ErrNotExist
}

12
go.mod
View File

@ -1,12 +0,0 @@
module dehub.dev/src/dehub.git
go 1.13
require (
github.com/bmatcuk/doublestar v1.2.2
github.com/davecgh/go-spew v1.1.1
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17
gopkg.in/src-d/go-billy.v4 v4.3.2
gopkg.in/src-d/go-git.v4 v4.13.1
gopkg.in/yaml.v2 v2.2.7
)

82
go.sum
View File

@ -1,82 +0,0 @@
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/bmatcuk/doublestar v1.2.2 h1:oC24CykoSAB8zd7XgruHo33E0cHJf/WhQA/7BeXj+x0=
github.com/bmatcuk/doublestar v1.2.2/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70=
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4 h1:HuIa8hRrWRSrqYzx1qI49NNxhdi2PrY7gxVSq1JjLDc=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17 h1:nVJ3guKA9qdkEQ3TUdXI9QSINo2CUPM/cySEvw2w8I0=
golang.org/x/crypto v0.0.0-20200109152110-61a87790db17/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80 h1:Ao/3l156eZf2AW5wK8a7/smtodRU+gha3+BeqJ69lRk=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e h1:D5TXcfTk7xF7hvieo4QErS3qqCB4teTffacDWr7CI+0=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -1,628 +0,0 @@
package dehub
import (
"bytes"
"errors"
"fmt"
"sort"
"strings"
"time"
"dehub.dev/src/dehub.git/accessctl"
"dehub.dev/src/dehub.git/fs"
"dehub.dev/src/dehub.git/sigcred"
"dehub.dev/src/dehub.git/typeobj"
"dehub.dev/src/dehub.git/yamlutil"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
yaml "gopkg.in/yaml.v2"
)
// Payload describes the methods which must be implemented by the different
// payload types. None of the methods should modify the underlying object.
type Payload interface {
	// MessageHead returns the head of the commit message (i.e. the first line).
	// The PayloadCommon of the outer PayloadUnion is passed in for added
	// context, if necessary.
	MessageHead(PayloadCommon) string

	// Fingerprint returns the raw fingerprint which can be signed when
	// accrediting this payload. The ChangedFile objects given describe the file
	// changes between the parent commit and this commit.
	//
	// If this method returns nil it means that the payload has no fingerprint
	// in-and-of-itself.
	Fingerprint([]ChangedFile) ([]byte, error)
}

// PayloadCommon describes the fields common to all Payloads.
type PayloadCommon struct {
	Fingerprint yamlutil.Blob             `yaml:"fingerprint"`
	Credentials []sigcred.CredentialUnion `yaml:"credentials"`

	// LegacyChangeHash is no longer used, use Fingerprint instead.
	// UnmarshalYAML migrates this field into Fingerprint when decoding old
	// payloads.
	LegacyChangeHash yamlutil.Blob `yaml:"change_hash,omitempty"`
}
// credIDs returns the sorted, de-duplicated set of IDs (account or anonymous)
// which have credentials attached to this payload.
func (cc PayloadCommon) credIDs() []string {
	seen := map[string]struct{}{}
	for _, cred := range cc.Credentials {
		if cred.AccountID != "" {
			seen[cred.AccountID] = struct{}{}
		} else if cred.AnonID != "" {
			seen[cred.AnonID] = struct{}{}
		}
	}

	ids := make([]string, 0, len(seen))
	for id := range seen {
		ids = append(ids, id)
	}
	sort.Strings(ids)
	return ids
}
// abbrevCommitMessage returns only the first line of the given commit
// message, truncated to at most 80 bytes (with a trailing "..." when
// truncation occurred).
func abbrevCommitMessage(msg string) string {
	// Keep only the first line. This deliberately uses >= 0 (not > 0) so a
	// message starting with a newline yields an empty head rather than a
	// multi-line one.
	if i := strings.Index(msg, "\n"); i >= 0 {
		msg = msg[:i]
	}
	if len(msg) > 80 {
		msg = msg[:77] + "..."
	}
	return msg
}
// PayloadUnion represents a single Payload of variable type. Only one field
// should be set on a PayloadUnion, unless otherwise noted. The "type" struct
// tags are consumed by the typeobj package when (un)marshaling.
type PayloadUnion struct {
	Change     *PayloadChange     `type:"change,default"`
	Credential *PayloadCredential `type:"credential"`
	Comment    *PayloadComment    `type:"comment"`

	// Common may be set in addition to one of the other fields.
	Common PayloadCommon `yaml:",inline"`
}
// MarshalYAML implements the yaml.Marshaler interface. It delegates to
// typeobj, which emits the populated payload field along with its "type" tag.
func (p PayloadUnion) MarshalYAML() (interface{}, error) {
	return typeobj.MarshalYAML(p)
}
// UnmarshalYAML implements the yaml.Unmarshaler interface. It also migrates
// the deprecated change_hash field into Fingerprint for old payloads.
func (p *PayloadUnion) UnmarshalYAML(unmarshal func(interface{}) error) error {
	if err := typeobj.UnmarshalYAML(p, unmarshal); err != nil {
		return err
	}
	if len(p.Common.LegacyChangeHash) > 0 {
		p.Common.Fingerprint = p.Common.LegacyChangeHash
		p.Common.LegacyChangeHash = nil
	}
	return nil
}
// Payload returns the Payload instance encapsulated by this PayloadUnion.
// typeobj reflects over the union's "type" struct tags to find the populated
// field.
//
// This will panic if a Payload field is not populated.
func (p PayloadUnion) Payload() Payload {
	el, _, err := typeobj.Element(p)
	if err != nil {
		panic(err)
	}
	return el.(Payload)
}

// Type returns the Payload's type (as would be used in its YAML "type" field).
//
// This will panic if a Payload field is not populated.
func (p PayloadUnion) Type() string {
	_, typeStr, err := typeobj.Element(p)
	if err != nil {
		panic(err)
	}
	return typeStr
}
// MarshalText implements the encoding.TextMarshaler interface by returning
// the form the payload takes in the git commit message: an abbreviated head
// line, a blank line, then the YAML document.
func (p PayloadUnion) MarshalText() ([]byte, error) {
	body, err := yaml.Marshal(p)
	if err != nil {
		return nil, fmt.Errorf("marshaling payload %+v as yaml: %w", p, err)
	}

	var buf bytes.Buffer
	buf.WriteString(abbrevCommitMessage(p.Payload().MessageHead(p.Common)))
	buf.WriteString("\n\n---\n")
	buf.Write(body)
	return buf.Bytes(), nil
}
// UnmarshalText implements the encoding.TextUnmarshaler interface by decoding
// a payload object which has been encoded into a git commit message.
// Everything after the first newline (i.e. the YAML document) is parsed; the
// head line itself is ignored.
func (p *PayloadUnion) UnmarshalText(msg []byte) error {
	i := bytes.IndexByte(msg, '\n')
	if i < 0 {
		return fmt.Errorf("commit message %q is malformed, it has no body", msg)
	}
	if err := yaml.Unmarshal(msg[i:], p); err != nil {
		return fmt.Errorf("unmarshaling commit payload from yaml: %w", err)
	}
	return nil
}
// AccreditPayload returns the given PayloadUnion with an appended Credential
// provided by the given SignifierInterface.
func (proj *Project) AccreditPayload(payUn PayloadUnion, sig sigcred.Signifier) (PayloadUnion, error) {
	// The signifier is resolved against the filesystem at HEAD.
	headFS, err := proj.headFS()
	if err != nil {
		return payUn, fmt.Errorf("retrieving HEAD fs: %w", err)
	}

	fingerprint := payUn.Common.Fingerprint
	cred, err := sig.Sign(headFS, fingerprint)
	if err != nil {
		return payUn, fmt.Errorf("signing fingerprint %q: %w", fingerprint, err)
	}

	payUn.Common.Credentials = append(payUn.Common.Credentials, cred)
	return payUn, nil
}
// CommitDirectParams are the parameters to the CommitDirect method. All are
// required, unless otherwise noted.
type CommitDirectParams struct {
	// PayloadUnion is encoded into the commit message of the created commit.
	PayloadUnion PayloadUnion

	// Author is used as both the author and committer name of the commit.
	Author string

	ParentHash plumbing.Hash // can be zero if the commit has no parents (Q_Q)

	// GitTree is the file tree the commit will point at.
	GitTree *object.Tree
}
// CommitDirect constructs a git commit object and stores it, returning the
// resulting Commit. This method does not interact with HEAD at all.
func (proj *Project) CommitDirect(params CommitDirectParams) (Commit, error) {
	// The payload is encoded directly into the commit message.
	msgB, err := params.PayloadUnion.MarshalText()
	if err != nil {
		return Commit{}, fmt.Errorf("encoding payload to message string: %w", err)
	}
	// The same signature is used as both author and committer.
	author := object.Signature{
		Name: params.Author,
		When: time.Now(),
	}
	commit := &object.Commit{
		Author:    author,
		Committer: author,
		Message:   string(msgB),
		TreeHash:  params.GitTree.Hash,
	}
	if params.ParentHash != plumbing.ZeroHash {
		commit.ParentHashes = []plumbing.Hash{params.ParentHash}
	}
	// Encode the commit and write it straight into git's object storage,
	// bypassing the worktree/index entirely.
	commitObj := proj.GitRepo.Storer.NewEncodedObject()
	if err := commit.Encode(commitObj); err != nil {
		return Commit{}, fmt.Errorf("encoding commit object: %w", err)
	}
	commitHash, err := proj.GitRepo.Storer.SetEncodedObject(commitObj)
	if err != nil {
		return Commit{}, fmt.Errorf("setting encoded object: %w", err)
	}
	return proj.GetCommit(commitHash)
}
// Commit uses the given PayloadUnion to create a git commit object and commits
// it to the current HEAD, returning the full Commit.
func (proj *Project) Commit(payUn PayloadUnion) (Commit, error) {
	// Resolve HEAD down to a concrete hash reference (i.e. follow symbolic
	// refs until an actual branch ref is found).
	headRef, err := proj.TraverseReferenceChain(plumbing.HEAD, func(ref *plumbing.Reference) bool {
		return ref.Type() == plumbing.HashReference
	})
	if err != nil {
		return Commit{}, fmt.Errorf("resolving HEAD to a hash reference: %w", err)
	}
	headRefName := headRef.Name()

	headHash, err := proj.ReferenceToHash(headRefName)
	if err != nil {
		return Commit{}, fmt.Errorf("resolving ref %q (HEAD): %w", headRefName, err)
	}

	// TODO this is also used in the same way in NewCommitChange. It might make
	// sense to refactor this logic out, it might not be needed in fs at all.
	_, stagedTree, err := fs.FromStagedChangesTree(proj.GitRepo)
	if err != nil {
		return Commit{}, fmt.Errorf("getting staged changes: %w", err)
	}

	// The commit author is the comma-joined list of credential account IDs.
	commit, err := proj.CommitDirect(CommitDirectParams{
		PayloadUnion: payUn,
		Author:       strings.Join(payUn.Common.credIDs(), ", "),
		ParentHash:   headHash,
		GitTree:      stagedTree,
	})
	if err != nil {
		return Commit{}, err
	}

	// now set the branch to this new commit
	newHeadRef := plumbing.NewHashReference(headRefName, commit.Hash)
	if err := proj.GitRepo.Storer.SetReference(newHeadRef); err != nil {
		return Commit{}, fmt.Errorf("setting reference %q to new commit hash %q: %w",
			headRefName, commit.Hash, err)
	}
	return commit, nil
}
// HasStagedChanges returns true if there are file changes which have been
// staged (e.g. via "git add").
func (proj *Project) HasStagedChanges() (bool, error) {
	worktree, err := proj.GitRepo.Worktree()
	if err != nil {
		return false, fmt.Errorf("retrieving worktree: %w", err)
	}

	status, err := worktree.Status()
	if err != nil {
		return false, fmt.Errorf("retrieving worktree status: %w", err)
	}

	// Any file whose staging status is something other than unmodified or
	// untracked counts as a staged change.
	for _, fileStatus := range status {
		switch fileStatus.Staging {
		case git.Unmodified, git.Untracked:
			// not staged, keep looking
		default:
			return true, nil
		}
	}
	return false, nil
}
// VerifyCommits verifies that the given commits, which are presumably on the
// given branch, are gucci.
func (proj *Project) VerifyCommits(branchName plumbing.ReferenceName, commits []Commit) error {
	// this isn't strictly necessary for this method, but it helps discover bugs
	// in other parts of the code.
	if len(commits) == 0 {
		return errors.New("cannot call VerifyCommits with empty commit slice")
	}

	// First determine the root of the main branch. All commits need to be an
	// ancestor of it. If the main branch has not been created yet then there
	// might not be a root commit yet.
	var rootCommitObj *object.Commit
	mainCommit, err := proj.GetCommitByRevision(plumbing.Revision(MainRefName))
	if errors.Is(err, plumbing.ErrReferenceNotFound) {
		// main branch hasn't been created yet. The commits can only be verified
		// if they are for the main branch and they include the root commit.
		if branchName != MainRefName {
			return fmt.Errorf("cannot verify commits in branch %q when no main branch exists", branchName)
		}
		for _, commit := range commits {
			if commit.Object.NumParents() == 0 {
				rootCommitObj = commit.Object
				break
			}
		}
		if rootCommitObj == nil {
			return errors.New("root commit of main branch cannot be determined")
		}
	} else if err != nil {
		return fmt.Errorf("retrieving commit at HEAD of %q: %w", MainRefName.Short(), err)
	} else {
		// Walk from main's HEAD back to its root, verifying along the way that
		// main's history is linear.
		rootCommitObj = mainCommit.Object
		for {
			if rootCommitObj.NumParents() == 0 {
				break
			} else if rootCommitObj.NumParents() > 1 {
				return fmt.Errorf("commit %q in main branch has more than one parent", rootCommitObj.Hash)
			} else if rootCommitObj, err = rootCommitObj.Parent(0); err != nil {
				return fmt.Errorf("retrieving parent commit of %q: %w", rootCommitObj.Hash, err)
			}
		}
	}

	// We also need the HEAD of the given branch, if it exists.
	branchCommit, err := proj.GetCommitByRevision(plumbing.Revision(branchName))
	if err != nil && !errors.Is(err, plumbing.ErrReferenceNotFound) {
		return fmt.Errorf("retrieving commit at HEAD of %q: %w", branchName.Short(), err)
	}

	for i, commit := range commits {
		// It's not a requirement that the given Commits are in ancestral order,
		// but usually they are; if the previous commit is the parent of this
		// one we can skip a bunch of work.
		var parentTree *object.Tree
		var isNonFF bool
		// BUGFIX: check NumParents before indexing ParentHashes[0]. In the
		// no-main-branch path above the root commit may appear anywhere in
		// commits, so a parentless commit can be encountered at i > 0, and
		// indexing its ParentHashes would panic.
		if i > 0 &&
			commit.Object.NumParents() > 0 &&
			commits[i-1].Hash == commit.Object.ParentHashes[0] {
			parentTree = commits[i-1].TreeObject
		} else if commit.Hash == rootCommitObj.Hash {
			// looking at the root commit, assume it's ok
		} else {
			var err error
			// isAncestor accumulates any error into err so a sequence of calls
			// can be made and the error checked once afterward.
			isAncestor := func(older, younger *object.Commit) bool {
				var isAncestor bool
				if err != nil {
					return false
				} else if isAncestor, err = older.IsAncestor(younger); err != nil {
					err = fmt.Errorf("determining if %q is an ancestor of %q: %w",
						younger.Hash, older.Hash, err)
					return false
				}
				return isAncestor
			}
			ancestorOfRoot := isAncestor(rootCommitObj, commit.Object)
			if branchCommit.Hash != plumbing.ZeroHash { // checking if the var was set
				// this could only be a nonFF if the branch actually exists.
				isNonFF = !isAncestor(branchCommit.Object, commit.Object)
			}
			if err != nil {
				return err
			} else if !ancestorOfRoot {
				return fmt.Errorf("commit %q must be direct descendant of root commit of %q (%q)",
					commit.Hash, MainRefName.Short(), rootCommitObj.Hash,
				)
			}
		}

		if err := proj.verifyCommit(branchName, commit, parentTree, isNonFF); err != nil {
			return fmt.Errorf("verifying commit %q: %w", commit.Hash, err)
		}
	}
	return nil
}
// parentTree returns the tree of the parent commit of the given commit. If the
// given commit has no parents then a bare tree is returned.
func (proj *Project) parentTree(commitObj *object.Commit) (*object.Tree, error) {
	switch commitObj.NumParents() {
	case 0:
		// Root commit: treat the parent tree as empty.
		return new(object.Tree), nil
	case 1:
		parentCommitObj, err := commitObj.Parent(0)
		if err != nil {
			return nil, fmt.Errorf("getting parent commit %q: %w",
				commitObj.ParentHashes[0], err)
		}
		parentTree, err := proj.GitRepo.TreeObject(parentCommitObj.TreeHash)
		if err != nil {
			return nil, fmt.Errorf("getting parent tree object %q: %w",
				parentCommitObj.TreeHash, err)
		}
		return parentTree, nil
	default:
		return nil, errors.New("commit has multiple parents")
	}
}
// verifyCommit verifies a single commit against the access controls and
// signifiers which were in effect at its parent.
//
// if parentTree is nil then it will be inferred.
func (proj *Project) verifyCommit(
	branchName plumbing.ReferenceName,
	commit Commit,
	parentTree *object.Tree,
	isNonFF bool,
) error {
	if parentTree == nil {
		var err error
		if parentTree, err = proj.parentTree(commit.Object); err != nil {
			return fmt.Errorf("retrieving parent tree of commit: %w", err)
		}
	}

	// Signifiers are looked up in the parent commit's tree, so that a commit
	// cannot grant itself credentials. A root commit has no parent, so its own
	// tree is used.
	var sigFS fs.FS
	if commit.Object.NumParents() == 0 {
		sigFS = fs.FromTree(commit.TreeObject)
	} else {
		sigFS = fs.FromTree(parentTree)
	}

	cfg, err := proj.loadConfig(sigFS)
	if err != nil {
		// BUGFIX: the original unconditionally formatted ParentHashes[0] in
		// this error, which panics for a root commit (no parents).
		if commit.Object.NumParents() == 0 {
			return fmt.Errorf("loading config of root commit %q: %w", commit.Hash, err)
		}
		return fmt.Errorf("loading config of parent %q: %w", commit.Object.ParentHashes[0], err)
	}

	// assert access controls
	changedFiles, err := ChangedFilesBetweenTrees(parentTree, commit.TreeObject)
	if err != nil {
		return fmt.Errorf("calculating diff from tree %q to tree %q: %w",
			parentTree.Hash, commit.TreeObject.Hash, err)
	} else if len(changedFiles) > 0 && commit.Payload.Change == nil {
		// BUGFIX: typo "files changes" corrected.
		return errors.New("files changed but commit is not a change commit")
	}

	pathsChanged := make([]string, len(changedFiles))
	for i := range changedFiles {
		pathsChanged[i] = changedFiles[i].Path
	}

	commitType := commit.Payload.Type()
	err = accessctl.AssertCanCommit(cfg.AccessControls, accessctl.CommitRequest{
		Type:           commitType,
		Branch:         branchName.Short(),
		Credentials:    commit.Payload.Common.Credentials,
		FilesChanged:   pathsChanged,
		NonFastForward: isNonFF,
	})
	if err != nil {
		return fmt.Errorf("asserting access controls: %w", err)
	}

	// ensure the fingerprint is what it's expected to be
	storedFingerprint := commit.Payload.Common.Fingerprint
	expectedFingerprint, err := commit.Payload.Payload().Fingerprint(changedFiles)
	if err != nil {
		return fmt.Errorf("calculating expected payload fingerprint: %w", err)
	} else if expectedFingerprint == nil {
		// the payload doesn't have a fingerprint of its own, it's just carrying
		// one, so no point in checking if it's "correct".
	} else if !bytes.Equal(storedFingerprint, expectedFingerprint) {
		return fmt.Errorf("unexpected fingerprint in payload, is %q but should be %q",
			storedFingerprint, yamlutil.Blob(expectedFingerprint))
	}

	// verify all credentials
	for _, cred := range commit.Payload.Common.Credentials {
		if cred.AccountID == "" {
			// anonymous credentials carry their own verification material.
			if err := cred.SelfVerify(storedFingerprint); err != nil {
				return fmt.Errorf("verifying credential %+v: %w", cred, err)
			}
		} else {
			sig, err := proj.signifierForCredential(sigFS, cred)
			if err != nil {
				return fmt.Errorf("finding signifier for credential %+v: %w", cred, err)
			} else if err := sig.Verify(sigFS, storedFingerprint, cred); err != nil {
				return fmt.Errorf("verifying credential %+v: %w", cred, err)
			}
		}
	}
	return nil
}
// LastChangeDescription iterates over the given commits in reverse order and
// returns the first change description it comes across. A change description
// may come from a change payload or a credential payload which covers a set of
// changes.
//
// This function will return an error if no given commits contain a change
// description.
func LastChangeDescription(commits []Commit) (string, error) {
	for i := len(commits) - 1; i >= 0; i-- {
		payUn := commits[i].Payload
		switch {
		case payUn.Change != nil:
			return payUn.Change.Description, nil
		case payUn.Credential != nil && payUn.Credential.ChangeDescription != "":
			return payUn.Credential.ChangeDescription, nil
		}
	}
	return "", errors.New("no commits in range contain a change description")
}
// changeRangeInfo collects information about the change commits within a range
// of commits.
type changeRangeInfo struct {
	// changeCommits is the subset of the range which are change commits.
	changeCommits []Commit

	// authors is the set of account IDs which credentialed the change commits.
	authors map[string]struct{}

	// startTree is the tree of the parent of the first commit in the range;
	// endTree is the tree of the last change commit in the range.
	startTree, endTree *object.Tree

	// changeDescription is the last change description found in the range.
	changeDescription string
}
// changeRangeInfo returns various pieces of information about a range of
// commits' changes.
func (proj *Project) changeRangeInfo(commits []Commit) (changeRangeInfo, error) {
	info := changeRangeInfo{
		authors: map[string]struct{}{},
	}

	// Collect the change commits in the range, along with everyone who
	// credentialed them.
	for _, commit := range commits {
		if commit.Payload.Change == nil {
			continue
		}
		info.changeCommits = append(info.changeCommits, commit)
		for _, cred := range commit.Payload.Common.Credentials {
			info.authors[cred.AccountID] = struct{}{}
		}
	}
	if len(info.changeCommits) == 0 {
		return changeRangeInfo{}, errors.New("no change commits found in range")
	}

	// startTree has to be the tree of the parent of the first commit, which
	// isn't included in commits. Determine it the hard way.
	startTree, err := proj.parentTree(commits[0].Object)
	if err != nil {
		return changeRangeInfo{}, fmt.Errorf("getting tree of parent of %q: %w",
			commits[0].Hash, err)
	}
	info.startTree = startTree

	descr, err := LastChangeDescription(commits)
	if err != nil {
		return changeRangeInfo{}, err
	}
	info.changeDescription = descr

	lastChangeCommit := info.changeCommits[len(info.changeCommits)-1]
	info.endTree = lastChangeCommit.TreeObject
	return info, nil
}
// changeFingerprint computes the change fingerprint over the whole range
// (startTree to endTree) using the given description.
func (info changeRangeInfo) changeFingerprint(descr string) ([]byte, error) {
	changedFiles, err := ChangedFilesBetweenTrees(info.startTree, info.endTree)
	if err != nil {
		return nil, fmt.Errorf("calculating diff of commit trees %q and %q: %w",
			info.startTree.Hash, info.endTree.Hash, err)
	}
	return genChangeFingerprint(nil, descr, changedFiles), nil
}
// VerifyCanSetBranchHEADTo is used to verify that a branch's HEAD can be set to
// the given hash. It verifies any new commits which are being added, and
// handles verifying non-fast-forward commits as well.
//
// If the given hash matches the current HEAD of the branch then this performs
// no further checks and returns nil.
func (proj *Project) VerifyCanSetBranchHEADTo(branchName plumbing.ReferenceName, hash plumbing.Hash) error {
	oldCommitRef, err := proj.GitRepo.Reference(branchName, true)
	if errors.Is(err, plumbing.ErrReferenceNotFound) {
		// if the branch is being created then just pull all of its commits and
		// verify them.
		// TODO optimize this so that it tries to use the merge-base with main,
		// so we're not re-verifying a ton of commits unnecessarily
		commits, err := proj.GetCommitRange(plumbing.ZeroHash, hash)
		if err != nil {
			return fmt.Errorf("retrieving %q and all its ancestors: %w", hash, err)
		}
		return proj.VerifyCommits(branchName, commits)
	} else if err != nil {
		return fmt.Errorf("resolving branch reference to a hash: %w", err)
	} else if oldCommitRef.Hash() == hash {
		// if the HEAD is already at the given hash then it must be fine.
		return nil
	}

	oldCommitObj, err := proj.GitRepo.CommitObject(oldCommitRef.Hash())
	if err != nil {
		return fmt.Errorf("retrieving commit object %q: %w", oldCommitRef.Hash(), err)
	}

	newCommit, err := proj.GetCommit(hash)
	if err != nil {
		return fmt.Errorf("retrieving commit %q: %w", hash, err)
	}

	if isAncestor, err := newCommit.Object.IsAncestor(oldCommitObj); err != nil {
		return fmt.Errorf("determining if %q is an ancestor of %q: %w",
			newCommit.Hash, oldCommitObj.Hash, err)
	} else if isAncestor {
		// if the new commit is an ancestor of the old one then the branch is
		// being force-pushed to a previous commit. This is weird to handle
		// using VerifyCommits, so just call verifyCommit directly.
		return proj.verifyCommit(branchName, newCommit, nil, true)
	}

	mbCommits, err := oldCommitObj.MergeBase(newCommit.Object)
	if err != nil {
		return fmt.Errorf("determining merge-base between %q and %q: %w",
			oldCommitObj.Hash, newCommit.Hash, err)
	} else if len(mbCommits) == 0 {
		return fmt.Errorf("%q and %q have no ancestors in common",
			oldCommitObj.Hash, newCommit.Hash)
	} else if len(mbCommits) > 1 {
		// BUGFIX: the original checked len == 2, silently letting three or
		// more merge-base candidates through; the intent (per the error
		// message) is to reject anything more than one.
		return fmt.Errorf("%q and %q have more than one ancestor in common",
			oldCommitObj.Hash, newCommit.Hash)
	}

	// Verify everything from the merge-base (exclusive) up to the new hash.
	commits, err := proj.GetCommitRange(mbCommits[0].Hash, hash)
	if err != nil {
		return fmt.Errorf("retrieving commits %q to %q: %w", mbCommits[0].Hash, hash, err)
	}
	return proj.VerifyCommits(branchName, commits)
}

View File

@ -1,176 +0,0 @@
package dehub
import (
"bytes"
"errors"
"fmt"
"sort"
"strings"
"dehub.dev/src/dehub.git/fs"
"dehub.dev/src/dehub.git/sigcred"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)
// PayloadChange describes the structure of a change payload.
type PayloadChange struct {
	Description string `yaml:"description"`

	// LegacyMessage is no longer used, use Description instead
	LegacyMessage string `yaml:"message,omitempty"`
}

// Compile-time assertion that PayloadChange implements Payload.
var _ Payload = PayloadChange{}
// NewPayloadChange constructs a PayloadUnion populated with a PayloadChange
// encompassing the currently staged file changes. The Credentials of the
// returned PayloadUnion will _not_ be filled in.
func (proj *Project) NewPayloadChange(description string) (PayloadUnion, error) {
	// Fall back to an empty tree when the repo has no HEAD commit yet.
	headTree := new(object.Tree)
	head, err := proj.GetHeadCommit()
	if err == nil {
		headTree = head.TreeObject
	} else if !errors.Is(err, ErrHeadIsZero) {
		return PayloadUnion{}, fmt.Errorf("getting HEAD commit: %w", err)
	}

	_, stagedTree, err := fs.FromStagedChangesTree(proj.GitRepo)
	if err != nil {
		return PayloadUnion{}, err
	}

	changedFiles, err := ChangedFilesBetweenTrees(headTree, stagedTree)
	if err != nil {
		return PayloadUnion{}, fmt.Errorf("calculating diff between HEAD and staged changes: %w", err)
	}

	payCh := PayloadChange{Description: description}
	fingerprint, err := payCh.Fingerprint(changedFiles)
	if err != nil {
		return PayloadUnion{}, err
	}

	return PayloadUnion{
		Change: &payCh,
		Common: PayloadCommon{Fingerprint: fingerprint},
	}, nil
}
// MessageHead implements the method for the Payload interface. For a change
// payload the head is simply the change description.
func (payCh PayloadChange) MessageHead(PayloadCommon) string {
	return payCh.Description
}
// Fingerprint implements the method for the Payload interface. The fingerprint
// covers the description and the full set of changed files.
func (payCh PayloadChange) Fingerprint(changedFiles []ChangedFile) ([]byte, error) {
	return genChangeFingerprint(nil, payCh.Description, changedFiles), nil
}
// UnmarshalYAML implements the yaml.Unmarshaler interface.
func (payCh *PayloadChange) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// Decode into a wrapper struct so that calling unmarshal here doesn't
	// recurse back into this method.
	var wrap struct {
		Inner PayloadChange `yaml:",inline"`
	}
	if err := unmarshal(&wrap); err != nil {
		return err
	}
	*payCh = wrap.Inner

	// Migrate the deprecated "message" field into Description.
	if payCh.LegacyMessage == "" {
		return nil
	}
	payCh.Description = payCh.LegacyMessage
	payCh.LegacyMessage = ""
	return nil
}
// CombinePayloadChanges takes all changes in the given range, combines them
// into a single PayloadChange, and commits it. The resulting payload will have
// the same message as the latest change payload in the range. If the
// fingerprint of the PayloadChange produced by this method has any matching
// Credentials in the range, those will be included in the payload as well.
//
// The combined commit is committed to the project with the given revision as
// its parent. If the diff across the given range and the diff from onto to the
// end of the range are different then this will return an error.
func (proj *Project) CombinePayloadChanges(commits []Commit, onto plumbing.ReferenceName) (Commit, error) {
	info, err := proj.changeRangeInfo(commits)
	if err != nil {
		return Commit{}, err
	}

	// Fingerprint across the whole range (start->end), using the last change
	// description found in it.
	commitsFingerprint, err := info.changeFingerprint(info.changeDescription)
	if err != nil {
		return Commit{}, err
	}

	// Sort the author IDs for a deterministic Author string.
	authors := make([]string, 0, len(info.authors))
	for author := range info.authors {
		authors = append(authors, author)
	}
	sort.Strings(authors)

	ontoBranchName, err := proj.ReferenceToBranchName(onto)
	if err != nil {
		return Commit{}, fmt.Errorf("resolving %q into a branch name: %w", onto, err)
	}

	// now determine the change hash from onto->end, to ensure that it remains
	// the same as from start->end
	ontoCommit, err := proj.GetCommitByRevision(plumbing.Revision(onto))
	if err != nil {
		return Commit{}, fmt.Errorf("resolving revision %q: %w", onto, err)
	}

	ontoEndChangedFiles, err := ChangedFilesBetweenTrees(ontoCommit.TreeObject, info.endTree)
	if err != nil {
		return Commit{}, fmt.Errorf("calculating file changes between %q and %q: %w",
			ontoCommit.Hash, commits[len(commits)-1].Hash, err)
	}

	ontoEndChangeFingerprint := genChangeFingerprint(nil, info.changeDescription, ontoEndChangedFiles)
	if !bytes.Equal(ontoEndChangeFingerprint, commitsFingerprint) {
		// TODO figure out what files to show as being the "problem files" in
		// the error message
		return Commit{}, fmt.Errorf("combining onto %q would produce a different change fingerprint, aborting combine", onto.Short())
	}

	// Carry over any credentials in the range which already cover the combined
	// fingerprint.
	var creds []sigcred.CredentialUnion
	for _, commit := range commits {
		if bytes.Equal(commit.Payload.Common.Fingerprint, commitsFingerprint) {
			creds = append(creds, commit.Payload.Common.Credentials...)
		}
	}

	// this is mostly to make tests easier
	sort.Slice(creds, func(i, j int) bool {
		return creds[i].AccountID < creds[j].AccountID
	})

	payUn := PayloadUnion{
		Change: &PayloadChange{
			Description: info.changeDescription,
		},
		Common: PayloadCommon{
			Fingerprint: commitsFingerprint,
			Credentials: creds,
		},
	}

	commit, err := proj.CommitDirect(CommitDirectParams{
		PayloadUnion: payUn,
		Author:       strings.Join(authors, ","),
		ParentHash:   ontoCommit.Hash,
		GitTree:      info.endTree,
	})
	if err != nil {
		return Commit{}, fmt.Errorf("storing commit: %w", err)
	}

	// set the onto branch to this new commit
	newHeadRef := plumbing.NewHashReference(ontoBranchName, commit.Hash)
	if err := proj.GitRepo.Storer.SetReference(newHeadRef); err != nil {
		return Commit{}, fmt.Errorf("setting reference %q to new commit hash %q: %w",
			ontoBranchName, commit.Hash, err)
	}
	return commit, nil
}

View File

@ -1,183 +0,0 @@
package dehub
import (
"reflect"
"strings"
"testing"
"github.com/davecgh/go-spew/spew"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// TestPayloadChangeVerify asserts that change commits can be created, that
// their commit messages have the expected head line, and that the payload
// round-trips through the commit message unchanged.
func TestPayloadChangeVerify(t *testing.T) {
	type step struct {
		descr   string
		msgHead string // defaults to msg
		tree    map[string]string
	}
	testCases := []struct {
		descr string
		steps []step
	}{
		{
			descr: "single commit",
			steps: []step{
				{
					descr: "first commit",
					tree:  map[string]string{"a": "0", "b": "1"},
				},
			},
		},
		{
			descr: "multiple commits",
			steps: []step{
				{
					descr: "first commit",
					tree:  map[string]string{"a": "0", "b": "1"},
				},
				{
					descr: "second commit, changing a",
					tree:  map[string]string{"a": "1"},
				},
				{
					descr: "third commit, empty",
				},
				{
					descr: "fourth commit, adding c, removing b",
					tree:  map[string]string{"b": "", "c": "2"},
				},
			},
		},
		{
			descr: "big body commits",
			steps: []step{
				{
					descr: "first commit, single line but with newline\n",
				},
				{
					descr:   "second commit, single line but with two newlines\n\n",
					msgHead: "second commit, single line but with two newlines\n\n",
				},
				{
					descr:   "third commit, multi-line with one newline\nanother line!",
					msgHead: "third commit, multi-line with one newline\n\n",
				},
				{
					descr:   "fourth commit, multi-line with two newlines\n\nanother line!",
					msgHead: "fourth commit, multi-line with two newlines\n\n",
				},
			},
		},
	}

	for _, test := range testCases {
		t.Run(test.descr, func(t *testing.T) {
			h := newHarness(t)
			rootSig := h.stageNewAccount("root", false)
			for _, step := range test.steps {
				h.stage(step.tree)
				commit := h.assertCommitChange(verifyShouldSucceed, step.descr, rootSig)
				if step.msgHead == "" {
					step.msgHead = strings.TrimSpace(step.descr) + "\n\n"
				}
				if !strings.HasPrefix(commit.Object.Message, step.msgHead) {
					t.Fatalf("commit message %q does not start with expected head %q",
						commit.Object.Message, step.msgHead)
				}

				var payUn PayloadUnion
				if err := payUn.UnmarshalText([]byte(commit.Object.Message)); err != nil {
					t.Fatalf("error unmarshaling commit message: %v", err)
				} else if !reflect.DeepEqual(payUn, commit.Payload) {
					// BUGFIX: the Sdump arguments were swapped relative to the
					// message labels; payUn is the returned (unmarshaled)
					// payload, commit.Payload the actual one.
					t.Fatalf("returned change payload:\n%s\ndoes not match actual one:\n%s",
						spew.Sdump(payUn), spew.Sdump(commit.Payload))
				}
			}
		})
	}
}
// TestCombinePayloadChanges exercises CombinePayloadChanges end-to-end: a
// change commit plus a credential commit in a side branch are combined onto
// main, and the result must carry both credentials and become main's HEAD.
func TestCombinePayloadChanges(t *testing.T) {
	h := newHarness(t)

	// commit initial config, so the root user can modify it in the next commit
	rootSig := h.stageNewAccount("root", false)
	h.assertCommitChange(verifyShouldSucceed, "initial commit", rootSig)

	// add a toot user and modify the access controls such that both accounts
	// are required for the main branch
	tootSig := h.stageNewAccount("toot", false)
	h.stageAccessControls(`
- action: allow
  filters:
  - type: branch
    pattern: main
  - type: payload_type
    payload_type: change
  - type: signature
    any_account: true
    count: 2
- action: allow
  filters:
  - type: not
    filter:
      type: branch
      pattern: main
  - type: signature
    any_account: true
    count: 1
`)
	tootCommit := h.assertCommitChange(verifyShouldSucceed, "add toot", rootSig)

	// make a single change commit in another branch using root. Then add a
	// credential using toot, and combine them onto main.
	otherBranch := plumbing.NewBranchReferenceName("other")
	h.checkout(otherBranch)
	h.stage(map[string]string{"foo": "bar"})
	fooCommit := h.assertCommitChange(verifyShouldSucceed, "add foo file", rootSig)

	// now adding a credential commit from toot should work
	credCommitPayUn, err := h.proj.NewPayloadCredential(fooCommit.Payload.Common.Fingerprint)
	if err != nil {
		t.Fatal(err)
	}
	credCommit := h.tryCommit(verifyShouldSucceed, credCommitPayUn, tootSig)

	allCommits, err := h.proj.GetCommitRange(tootCommit.Hash, credCommit.Hash)
	if err != nil {
		t.Fatalf("getting commits: %v", err)
	}

	combinedCommit, err := h.proj.CombinePayloadChanges(allCommits, MainRefName)
	if err != nil {
		t.Fatal(err)
	}

	// that new commit should have both credentials
	creds := combinedCommit.Payload.Common.Credentials
	if len(creds) != 2 {
		t.Fatalf("combined commit has %d credentials, not 2", len(creds))
	} else if creds[0].AccountID != "root" {
		t.Fatalf("combined commit first credential should be from root, is from %q", creds[0].AccountID)
	} else if creds[1].AccountID != "toot" {
		t.Fatalf("combined commit second credential should be from toot, is from %q", creds[1].AccountID)
	}

	// double check that the HEAD commit of main got properly set
	h.checkout(MainRefName)
	mainHead, err := h.proj.GetHeadCommit()
	if err != nil {
		t.Fatal(err)
	} else if mainHead.Hash != combinedCommit.Hash {
		t.Fatalf("mainHead's should be pointed at %s but is pointed at %s",
			combinedCommit.Hash, mainHead.Hash)
	} else if err = h.proj.VerifyCommits(MainRefName, []Commit{combinedCommit}); err != nil {
		t.Fatalf("unable to verify combined commit: %v", err)
	} else if author := combinedCommit.Object.Author.Name; author != "root" {
		t.Fatalf("unexpected author value %q", author)
	}
}

View File

@ -1,39 +0,0 @@
package dehub
import (
"errors"
)
// PayloadComment describes the structure of a comment payload.
type PayloadComment struct {
	// Comment is the free-form comment text.
	Comment string `yaml:"comment"`
}

// Compile-time assertion that PayloadComment implements Payload.
var _ Payload = PayloadComment{}
// NewPayloadComment constructs a PayloadUnion populated with a PayloadComment.
// The Credentials of the returned PayloadUnion will _not_ be filled in.
func (proj *Project) NewPayloadComment(comment string) (PayloadUnion, error) {
	payload := PayloadComment{Comment: comment}

	// A comment never carries file changes, so fingerprint over nil.
	fingerprint, err := payload.Fingerprint(nil)
	if err != nil {
		return PayloadUnion{}, err
	}

	payUn := PayloadUnion{
		Comment: &payload,
		Common:  PayloadCommon{Fingerprint: fingerprint},
	}
	return payUn, nil
}
// MessageHead implements the method for the Payload interface. The comment
// text is wrapped in double-quotes for the head line.
func (payCom PayloadComment) MessageHead(common PayloadCommon) string {
	return `"` + payCom.Comment + `"`
}
// Fingerprint implements the method for the Payload interface. A comment
// payload can never be accompanied by file changes.
func (payCom PayloadComment) Fingerprint(changes []ChangedFile) ([]byte, error) {
	if len(changes) > 0 {
		return nil, errors.New("PayloadComment cannot have any changed files")
	}
	return genCommentFingerprint(nil, payCom.Comment), nil
}

View File

@ -1,82 +0,0 @@
package dehub
import (
"errors"
)
// PayloadCredential describes the structure of a credential payload.
type PayloadCredential struct {
	// CommitHashes represents the commits which this credential is accrediting.
	// It is only present for informational purposes, as commits do not have
	// any bearing on the CredentialedHash itself.
	CommitHashes []string `yaml:"commits,omitempty"`

	// ChangeDescription represents the description which has been credentialed.
	// This field is only relevant if the Credential in the payload is for a
	// change set.
	ChangeDescription string `yaml:"change_description"`
}

// Compile-time assertion that PayloadCredential implements Payload.
var _ Payload = PayloadCredential{}
// NewPayloadCredential constructs and returns a PayloadUnion populated with a
// PayloadCredential for the given fingerprint. The Credentials of the returned
// PayloadUnion will _not_ be filled in.
func (proj *Project) NewPayloadCredential(fingerprint []byte) (PayloadUnion, error) {
	// The credential payload itself is empty; the fingerprint being
	// credentialed is carried in the common fields.
	return PayloadUnion{
		Credential: &PayloadCredential{},
		Common:     PayloadCommon{Fingerprint: fingerprint},
	}, nil
}
// NewPayloadCredentialFromChanges constructs and returns a PayloadUnion
// populated with a PayloadCredential. The fingerprint of the payload will be a
// change fingerprint generated from the given description and all changes in
// the given range of Commits.
//
// If an empty description is given then the description of the last change
// payload in the range is used when generating the fingerprint.
func (proj *Project) NewPayloadCredentialFromChanges(descr string, commits []Commit) (PayloadUnion, error) {
	info, err := proj.changeRangeInfo(commits)
	if err != nil {
		return PayloadUnion{}, err
	}

	// Default to the last change description found in the range.
	if descr == "" {
		descr = info.changeDescription
	}

	fingerprint, err := info.changeFingerprint(descr)
	if err != nil {
		return PayloadUnion{}, err
	}

	payUn, err := proj.NewPayloadCredential(fingerprint)
	if err != nil {
		return PayloadUnion{}, err
	}
	payUn.Credential.ChangeDescription = descr

	// Record which change commits this credential covers (informational only).
	hashes := make([]string, 0, len(info.changeCommits))
	for _, commit := range info.changeCommits {
		hashes = append(hashes, commit.Hash.String())
	}
	payUn.Credential.CommitHashes = hashes
	return payUn, nil
}
// MessageHead implements the method for the Payload interface. The head names
// the fingerprint being credentialed.
func (payCred PayloadCredential) MessageHead(common PayloadCommon) string {
	return "Credential of " + common.Fingerprint.String()
}
// Fingerprint implements the method for the Payload interface. A credential
// payload can never be accompanied by file changes.
func (payCred PayloadCredential) Fingerprint(changes []ChangedFile) ([]byte, error) {
	if len(changes) > 0 {
		return nil, errors.New("PayloadCredential cannot have any changed files")
	}
	// a PayloadCredential can't compute its own fingerprint, it's stored in the
	// common.
	return nil, nil
}

View File

@ -1,50 +0,0 @@
package dehub
import (
"testing"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// TestPayloadCredentialVerify asserts that access controls are applied to
// credential commits: a user restricted to one branch may only commit
// credentials there.
func TestPayloadCredentialVerify(t *testing.T) {
	h := newHarness(t)
	rootSig := h.stageNewAccount("root", false)

	// create a new account and modify the config so that that account is only
	// allowed to add verifications to a single branch
	tootSig := h.stageNewAccount("toot", false)
	tootBranch := plumbing.NewBranchReferenceName("toot_branch")
	h.stageAccessControls(`
- action: allow
  filters:
  - type: branch
    pattern: ` + tootBranch.Short() + `
  - type: signature
    count: 1
    account_ids:
    - root
    - toot
- action: allow
  filters:
  - type: signature
    count: 1
    account_ids:
    - root
`)
	rootGitCommit := h.assertCommitChange(verifyShouldSucceed, "initial commit", rootSig)

	// toot user wants to create a credential commit for the root commit, for
	// whatever reason.
	rootChangeFingerprint := rootGitCommit.Payload.Common.Fingerprint
	credCommitPayUn, err := h.proj.NewPayloadCredential(rootChangeFingerprint)
	if err != nil {
		t.Fatalf("creating credential commit for fingerprint %x: %v", rootChangeFingerprint, err)
	}
	// on main, toot is not allowed to commit.
	h.tryCommit(verifyShouldFail, credCommitPayUn, tootSig)

	// toot tries again in their own branch, and should be allowed.
	h.checkout(tootBranch)
	h.tryCommit(verifyShouldSucceed, credCommitPayUn, tootSig)
}

View File

@ -1,452 +0,0 @@
package dehub
import (
"errors"
"regexp"
"testing"
"dehub.dev/src/dehub.git/accessctl"
"dehub.dev/src/dehub.git/sigcred"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
)
// TestConfigChange asserts that config changes (e.g. adding accounts) only
// take effect once committed by an already-authorized user.
func TestConfigChange(t *testing.T) {
	h := newHarness(t)
	rootSig := h.stageNewAccount("root", false)
	var commits []Commit

	// commit the initial staged changes, which merely include the config and
	// public key
	commit := h.assertCommitChange(verifyShouldSucceed, "commit configuration", rootSig)
	commits = append(commits, commit)

	// create a new account and add it to the configuration. That commit should
	// not be verifiable, though
	tootSig := h.stageNewAccount("toot", false)
	h.stageCfg()
	h.assertCommitChange(verifyShouldFail, "add toot user", tootSig)

	// now add with the root user, this should work.
	h.stageCfg()
	commit = h.assertCommitChange(verifyShouldSucceed, "add toot user", rootSig)
	commits = append(commits, commit)

	// _now_ the toot user should be able to do things.
	h.stage(map[string]string{"foo/bar": "what a cool file"})
	commit = h.assertCommitChange(verifyShouldSucceed, "add a cool file", tootSig)
	commits = append(commits, commit)

	// the whole history should still verify.
	if err := h.proj.VerifyCommits(MainRefName, commits); err != nil {
		t.Fatal(err)
	}
}
// TestMainAncestryRequirement checks that commits on a non-main branch do not
// verify unless the branch shares ancestry with main (or main exists at all).
func TestMainAncestryRequirement(t *testing.T) {
	otherBranch := plumbing.NewBranchReferenceName("other")

	t.Run("empty repo", func(t *testing.T) {
		h := newHarness(t)
		rootSig := h.stageNewAccount("root", false)
		h.checkout(otherBranch)
		// stage and try to add to the "other" branch, it shouldn't work though
		h.stageCfg()
		h.assertCommitChange(verifyShouldFail, "starting new branch at other", rootSig)
	})

	t.Run("new branch, single commit", func(t *testing.T) {
		h := newHarness(t)
		rootSig := h.stageNewAccount("root", false)
		h.assertCommitChange(verifyShouldSucceed, "add cfg", rootSig)

		// set HEAD to this other branch which doesn't really exist
		ref := plumbing.NewSymbolicReference(plumbing.HEAD, otherBranch)
		if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
			h.t.Fatal(err)
		}
		h.stageCfg()
		h.assertCommitChange(verifyShouldFail, "starting new branch at other", rootSig)
	})
}
// TestAnonymousCommits checks that an account-less ("anonymous") signifier can
// commit when the access controls allow any signature (`any: true`).
func TestAnonymousCommits(t *testing.T) {
	h := newHarness(t)
	// true -> the account is anonymous and is NOT added to the config.
	anonSig := h.stageNewAccount("anon", true)
	h.stageAccessControls(`
- action: allow
filters:
- type: signature
any: true
`)
	h.assertCommitChange(verifyShouldSucceed, "this will work", anonSig)
}
// TestNonFastForwardCommits checks that non-fast-forward commits are denied by
// default, and permitted once an access control with non_fast_forward is
// committed to the config.
func TestNonFastForwardCommits(t *testing.T) {
	h := newHarness(t)
	rootSig := h.stageNewAccount("root", false)
	initCommit := h.assertCommitChange(verifyShouldSucceed, "init", rootSig)

	// add another commit
	h.stage(map[string]string{"foo": "foo"})
	fooCommit := h.assertCommitChange(verifyShouldSucceed, "foo", rootSig)

	// commitOn points HEAD at the given hash and commits the staged changes on
	// top of it, bypassing the harness' verification.
	// NOTE(review): the msg parameter is ignored; the change payload message is
	// hard-coded to "bar" — confirm whether that is intentional.
	commitOn := func(hash plumbing.Hash, msg string) Commit {
		ref := plumbing.NewHashReference(plumbing.HEAD, hash)
		if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
			h.t.Fatal(err)
		} else if commitChange, err := h.proj.NewPayloadChange("bar"); err != nil {
			h.t.Fatal(err)
		} else if commitChange, err = h.proj.AccreditPayload(commitChange, rootSig); err != nil {
			h.t.Fatal(err)
		} else if gitCommit, err := h.proj.Commit(commitChange); err != nil {
			h.t.Fatal(err)
		} else {
			return gitCommit
		}
		panic("can't get here")
	}

	// checkout initCommit directly, make a new commit on top of it, and try to
	// verify that (this is too fancy for the harness, must be done manually).
	h.stage(map[string]string{"bar": "bar"})
	barCommit := commitOn(initCommit.Hash, "bar")
	err := h.proj.VerifyCommits(MainRefName, []Commit{barCommit})
	if !errors.As(err, new(accessctl.ErrCommitRequestDenied)) {
		h.t.Fatalf("expected ErrCommitRequestDenied, got: %v", err)
	}

	// check main back out (fooCommit should be checked out), and modify the
	// config to allow nonFF commits, and add another bogus commit on top.
	h.checkout(MainRefName)
	h.stageAccessControls(`
- action: allow
filters:
- type: commit_attributes
non_fast_forward: true`)
	h.stageCfg()
	allowNonFFCommit := h.assertCommitChange(verifyShouldSucceed, "allow non-ff", rootSig)
	h.stage(map[string]string{"foo": "foo foo"})
	h.assertCommitChange(verifyShouldSucceed, "foo foo", rootSig)

	// checking out allowNonFFCommit directly and performing a nonFF commit
	// should work now.
	h.stage(map[string]string{"baz": "baz"})
	bazCommit := commitOn(allowNonFFCommit.Hash, "baz")
	if err = h.proj.VerifyCommits(MainRefName, []Commit{bazCommit}); err != nil {
		h.t.Fatal(err)
	}

	// verifying the full history should also work
	gitCommits := []Commit{initCommit, fooCommit, allowNonFFCommit, bazCommit}
	if err = h.proj.VerifyCommits(MainRefName, gitCommits); err != nil {
		h.t.Fatal(err)
	}
}
// TestVerifyCanSetBranchHEADTo is a table test covering the branch-head rules
// enforced by VerifyCanSetBranchHEADTo: creation of main, fast-forwards,
// non-fast-forwards, branches without a main, and branches unrelated to main.
func TestVerifyCanSetBranchHEADTo(t *testing.T) {
	type toTest struct {
		// branchName and hash are the arguments passed into
		// VerifyCanSetBranchHEADTo.
		branchName plumbing.ReferenceName
		hash       plumbing.Hash

		// if set then the branch will have its HEAD reset to this hash prior to
		// calling VerifyCanSetBranchHEADTo.
		resetTo plumbing.Hash
	}

	type test struct {
		descr string
		init  func(h *harness, rootSig sigcred.Signifier) toTest

		// If set then the verify call is expected to fail, and expErr is a
		// regex which should match the fully unwrapped error returned.
		expErr string
	}

	tests := []test{
		{
			descr: "creation of main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				// checkout other and build on top of that, so that when
				// VerifyCanSetBranchHEADTo is called main won't exist.
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				return toTest{
					branchName: MainRefName,
					hash:       initCommit.Hash,
				}
			},
		},
		{
			descr: "main ff",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				h.stage(map[string]string{"foo": "foo"})
				nextCommit := h.assertCommitChange(verifySkip, "next", rootSig)
				return toTest{
					branchName: MainRefName,
					hash:       nextCommit.Hash,
					resetTo:    initCommit.Hash,
				}
			},
		},
		{
			descr: "new branch, no main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				// checkout other and build on top of that, so that when
				// VerifyCanSetBranchHEADTo is called main won't exist.
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				return toTest{
					branchName: plumbing.NewBranchReferenceName("other2"),
					hash:       initCommit.Hash,
				}
			},
			expErr: `^cannot verify commits in branch "refs/heads/other2" when no main branch exists$`,
		},
		{
			// this case isn't generally possible, unless someone manually
			// creates a branch in an empty repo on the remote
			descr: "existing branch, no main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				// checkout other and build on top of that, so that when
				// VerifyCanSetBranchHEADTo is called main won't exist.
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				h.stage(map[string]string{"foo": "foo"})
				fooCommit := h.assertCommitChange(verifySkip, "foo", rootSig)
				return toTest{
					branchName: other,
					hash:       fooCommit.Hash,
					resetTo:    initCommit.Hash,
				}
			},
			expErr: `^cannot verify commits in branch "refs/heads/other" when no main branch exists$`,
		},
		{
			descr: "new branch, not ancestor of main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				h.assertCommitChange(verifySkip, "init", rootSig)

				// create new branch with no HEAD, and commit on that.
				other := plumbing.NewBranchReferenceName("other")
				ref := plumbing.NewSymbolicReference(plumbing.HEAD, other)
				if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
					t.Fatal(err)
				}
				h.stageCfg()
				h.stage(map[string]string{"foo": "foo"})
				badInitCommit := h.assertCommitChange(verifySkip, "a different init", rootSig)
				return toTest{
					branchName: plumbing.NewBranchReferenceName("other2"),
					hash:       badInitCommit.Hash,
				}
			},
			expErr: `^commit "[0-9a-f]+" must be direct descendant of root commit of "main" \("[0-9a-f]+"\)$`,
		},
		{
			// this case isn't generally possible, unless someone manually
			// creates a branch in an empty repo on the remote
			descr: "existing branch, not ancestor of main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				h.assertCommitChange(verifySkip, "init", rootSig)

				// create new branch with no HEAD, and commit on that.
				other := plumbing.NewBranchReferenceName("other")
				ref := plumbing.NewSymbolicReference(plumbing.HEAD, other)
				if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
					t.Fatal(err)
				}
				h.stageCfg()
				h.stage(map[string]string{"foo": "foo"})
				badInitCommit := h.assertCommitChange(verifySkip, "a different init", rootSig)
				h.stage(map[string]string{"bar": "bar"})
				barCommit := h.assertCommitChange(verifySkip, "bar", rootSig)
				return toTest{
					branchName: other,
					hash:       barCommit.Hash,
					resetTo:    badInitCommit.Hash,
				}
			},
			expErr: `^commit "[0-9a-f]+" must be direct descendant of root commit of "main" \("[0-9a-f]+"\)$`,
		},
		{
			descr: "new branch off of main",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				h.stage(map[string]string{"foo": "foo"})
				fooCommit := h.assertCommitChange(verifySkip, "foo", rootSig)
				return toTest{
					branchName: other,
					hash:       fooCommit.Hash,
					resetTo:    initCommit.Hash,
				}
			},
		},
		{
			descr: "new branch off of older main commit",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				h.stage(map[string]string{"foo": "foo"})
				h.assertCommitChange(verifySkip, "foo", rootSig)
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				h.reset(initCommit.Hash, git.HardReset)
				h.stage(map[string]string{"bar": "bar"})
				barCommit := h.assertCommitChange(verifySkip, "bar", rootSig)
				return toTest{
					branchName: other,
					hash:       barCommit.Hash,
					resetTo:    initCommit.Hash,
				}
			},
		},
		{
			descr: "branch ff",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				h.assertCommitChange(verifySkip, "init", rootSig)
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				var commits []Commit
				for _, str := range []string{"foo", "bar", "baz", "biz", "buz"} {
					h.stage(map[string]string{str: str})
					commit := h.assertCommitChange(verifySkip, str, rootSig)
					commits = append(commits, commit)
				}
				return toTest{
					branchName: other,
					hash:       commits[len(commits)-1].Hash,
					resetTo:    commits[0].Hash,
				}
			},
		},
		{
			descr: "main nonff",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				initCommit := h.assertCommitChange(verifySkip, "init", rootSig)
				h.stage(map[string]string{"foo": "foo"})
				h.assertCommitChange(verifySkip, "foo", rootSig)

				// start another branch back at init and make a new commit on it
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				h.reset(initCommit.Hash, git.HardReset)
				h.stage(map[string]string{"bar": "bar"})
				barCommit := h.assertCommitChange(verifySkip, "bar", rootSig)
				return toTest{
					branchName: MainRefName,
					hash:       barCommit.Hash,
				}
			},
			expErr: `^commit matched and denied by this access control:`,
		},
		{
			descr: "branch nonff",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				h.assertCommitChange(verifySkip, "init", rootSig)
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				h.stage(map[string]string{"foo": "foo"})
				fooCommit := h.assertCommitChange(verifySkip, "foo", rootSig)
				h.stage(map[string]string{"bar": "bar"})
				h.assertCommitChange(verifySkip, "bar", rootSig)
				other2 := plumbing.NewBranchReferenceName("other2")
				h.checkout(other2)
				h.reset(fooCommit.Hash, git.HardReset)
				h.stage(map[string]string{"baz": "baz"})
				bazCommit := h.assertCommitChange(verifySkip, "baz", rootSig)
				return toTest{
					branchName: other,
					hash:       bazCommit.Hash,
				}
			},
		},
		{
			descr: "branch nonff to previous commit",
			init: func(h *harness, rootSig sigcred.Signifier) toTest {
				h.assertCommitChange(verifySkip, "init", rootSig)
				other := plumbing.NewBranchReferenceName("other")
				h.checkout(other)
				h.stage(map[string]string{"foo": "foo"})
				fooCommit := h.assertCommitChange(verifySkip, "foo", rootSig)
				h.stage(map[string]string{"bar": "bar"})
				h.assertCommitChange(verifySkip, "bar", rootSig)
				return toTest{
					branchName: other,
					hash:       fooCommit.Hash,
				}
			},
		},
	}

	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			h := newHarness(t)
			rootSig := h.stageNewAccount("root", false)
			toTest := test.init(h, rootSig)

			// optionally rewind the branch before verifying.
			if toTest.resetTo != plumbing.ZeroHash {
				ref := plumbing.NewHashReference(toTest.branchName, toTest.resetTo)
				if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
					t.Fatal(err)
				}
			}

			err := h.proj.VerifyCanSetBranchHEADTo(toTest.branchName, toTest.hash)
			if test.expErr == "" {
				if err != nil {
					t.Fatalf("unexpected error: %v", err)
				}
				return
			} else if err == nil {
				t.Fatal("expected verification to fail")
			}

			// unwrap the error chain fully before matching the regex.
			ogErr := err
			for {
				if unwrappedErr := errors.Unwrap(err); unwrappedErr != nil {
					err = unwrappedErr
				} else {
					break
				}
			}

			errRegex := regexp.MustCompile(test.expErr)
			if !errRegex.MatchString(err.Error()) {
				t.Fatalf("\nexpected error of form %q\nbut got: %v", test.expErr, ogErr)
			}
		})
	}
}

View File

@ -1,326 +0,0 @@
// Package dehub TODO needs package docs
package dehub
import (
"bytes"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"dehub.dev/src/dehub.git/fs"
"gopkg.in/src-d/go-billy.v4"
"gopkg.in/src-d/go-billy.v4/memfs"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/cache"
"gopkg.in/src-d/go-git.v4/plumbing/format/config"
"gopkg.in/src-d/go-git.v4/storage"
"gopkg.in/src-d/go-git.v4/storage/filesystem"
)
const (
	// DehubDir defines the name of the directory where all dehub-related files
	// are expected to be found within the git repo.
	DehubDir = ".dehub"
)

var (
	// ConfigPath defines the expected path to the Project's configuration file.
	ConfigPath = filepath.Join(DehubDir, "config.yml")

	// Main defines the name of the main branch.
	Main = "main"

	// MainRefName defines the reference name of the main branch
	// (i.e. "refs/heads/main").
	MainRefName = plumbing.NewBranchReferenceName(Main)
)
// openOpts holds the resolved set of options accepted by OpenProject.
type openOpts struct {
	bare bool
}

// OpenOption is an option which can be passed to the OpenProject function to
// affect the Project's behavior.
type OpenOption func(*openOpts)

// OpenBareRepo returns an OpenOption which, if true is given, causes the
// OpenProject function to expect to open a bare git repo.
func OpenBareRepo(bare bool) OpenOption {
	return func(opts *openOpts) {
		opts.bare = bare
	}
}
// Project implements accessing and modifying a local dehub project, as well as
// extending the functionality of the underlying git repo in ways which are
// specifically useful for dehub projects.
type Project struct {
	// GitRepo is the git repository which houses the project.
	GitRepo *git.Repository

	// GitDirFS corresponds to the .git directory (or the entire repo directory
	// if it's a bare repo)
	GitDirFS billy.Filesystem
}
// extractGitDirFS pulls the underlying billy.Filesystem out of a git storage
// object, returning an error if the storer does not expose one.
func extractGitDirFS(storer storage.Storer) (billy.Filesystem, error) {
	fser, ok := storer.(interface{ Filesystem() billy.Filesystem })
	if !ok {
		return nil, fmt.Errorf("git storage object of type %T does not expose its underlying filesystem",
			storer)
	}
	return fser.Filesystem(), nil
}
// OpenProject opens the dehub project in the given directory and returns a
// Project instance for it.
//
// The given path is expected to have a git repo already initialized.
func OpenProject(path string, options ...OpenOption) (*Project, error) {
	var opts openOpts
	for _, opt := range options {
		opt(&opts)
	}

	// for non-bare repos allow git's usual .git-directory discovery.
	plainOpts := &git.PlainOpenOptions{
		DetectDotGit: !opts.bare,
	}

	var proj Project
	var err error
	if proj.GitRepo, err = git.PlainOpenWithOptions(path, plainOpts); err != nil {
		return nil, fmt.Errorf("opening git repo: %w", err)
	}
	if proj.GitDirFS, err = extractGitDirFS(proj.GitRepo.Storer); err != nil {
		return nil, err
	}
	return &proj, nil
}
// initOpts holds the resolved set of options accepted by the Init functions.
type initOpts struct {
	bare   bool
	remote bool
}

// InitOption is an option which can be passed into the Init functions to affect
// their behavior.
type InitOption func(*initOpts)

// InitBareRepo returns an InitOption which, if true is given, causes the Init
// function to initialize the project's git repo without a worktree.
func InitBareRepo(bare bool) InitOption {
	return func(opts *initOpts) {
		opts.bare = bare
	}
}

// InitRemoteRepo returns an InitOption which, if true is given, causes the Init
// function to initialize the project's git repo with certain git configuration
// options set which make the repo able to be used as a remote repo.
func InitRemoteRepo(remote bool) InitOption {
	return func(opts *initOpts) {
		opts.remote = remote
	}
}
// InitProject will initialize a new project at the given path. If bare is true
// then the project's git repo will not have a worktree.
func InitProject(path string, options ...InitOption) (*Project, error) {
	var opts initOpts
	for _, opt := range options {
		opt(&opts)
	}

	var proj Project
	var err error
	if proj.GitRepo, err = git.PlainInit(path, opts.bare); err != nil {
		return nil, fmt.Errorf("initializing git repo: %w", err)
	}
	if proj.GitDirFS, err = extractGitDirFS(proj.GitRepo.Storer); err != nil {
		return nil, err
	}
	if err = proj.init(opts); err != nil {
		return nil, fmt.Errorf("initializing repo with dehub defaults: %w", err)
	}
	return &proj, nil
}
// InitMemProject initializes an empty project which only exists in memory.
// It panics on any error, since an in-memory init failing indicates a
// programmer bug rather than a runtime condition.
func InitMemProject(options ...InitOption) *Project {
	var opts initOpts
	for _, opt := range options {
		opt(&opts)
	}

	// back both the .git dir and (optionally) the worktree with an in-memory
	// filesystem.
	fs := memfs.New()
	dotGitFS, err := fs.Chroot(git.GitDirName)
	if err != nil {
		panic(err)
	}
	storage := filesystem.NewStorage(dotGitFS, cache.NewObjectLRUDefault())

	// a nil worktree makes git.Init create a bare repo.
	var worktree billy.Filesystem
	if !opts.bare {
		worktree = fs
	}
	r, err := git.Init(storage, worktree)
	if err != nil {
		panic(err)
	}

	proj := &Project{GitRepo: r, GitDirFS: dotGitFS}
	if err := proj.init(opts); err != nil {
		panic(err)
	}
	return proj
}
// initRemotePreReceive writes an executable hooks/pre-receive script into the
// git dir which shells out to `dehub hook pre-receive` (with -bare when the
// repo is bare), so pushes to this repo are verified by dehub.
func (proj *Project) initRemotePreReceive(bare bool) error {
	if err := proj.GitDirFS.MkdirAll("hooks", 0755); err != nil {
		return fmt.Errorf("creating hooks directory: %w", err)
	}

	// truncate any existing hook; 0755 so git can execute it.
	preRcvFlags := os.O_WRONLY | os.O_CREATE | os.O_TRUNC
	preRcv, err := proj.GitDirFS.OpenFile("hooks/pre-receive", preRcvFlags, 0755)
	if err != nil {
		return fmt.Errorf("opening hooks/pre-receive file: %w", err)
	}
	defer preRcv.Close()

	var preRcvBody string
	if bare {
		preRcvBody = "#!/bin/sh\nexec dehub hook -bare pre-receive\n"
	} else {
		preRcvBody = "#!/bin/sh\nexec dehub hook pre-receive\n"
	}
	if _, err := io.Copy(preRcv, bytes.NewBufferString(preRcvBody)); err != nil {
		return fmt.Errorf("writing to hooks/pre-receive: %w", err)
	}
	return nil
}
// init applies dehub defaults to a freshly initialized repo: points HEAD at
// the main branch, and — for remote-enabled repos — enables http receivepack
// and installs the pre-receive hook.
func (proj *Project) init(opts initOpts) error {
	headRef := plumbing.NewSymbolicReference(plumbing.HEAD, MainRefName)
	if err := proj.GitRepo.Storer.SetReference(headRef); err != nil {
		return fmt.Errorf("setting HEAD reference to %q: %w", MainRefName, err)
	}

	if opts.remote {
		cfg, err := proj.GitRepo.Config()
		if err != nil {
			return fmt.Errorf("opening git cfg: %w", err)
		}

		// allow pushes over http ("smart http" receive-pack).
		cfg.Raw = cfg.Raw.AddOption("http", config.NoSubsection, "receivepack", "true")
		if err := proj.GitRepo.Storer.SetConfig(cfg); err != nil {
			return fmt.Errorf("storing modified git config: %w", err)
		}

		if err := proj.initRemotePreReceive(opts.bare); err != nil {
			return fmt.Errorf("initializing pre-receive hook for remote-enabled repo: %w", err)
		}
	}
	return nil
}
// billyFilesystem returns the billy.Filesystem backing the repo's worktree.
func (proj *Project) billyFilesystem() (billy.Filesystem, error) {
	worktree, err := proj.GitRepo.Worktree()
	if err != nil {
		return nil, fmt.Errorf("opening git worktree: %w", err)
	}
	return worktree.Filesystem, nil
}
// errTraverseRefNoMatch is the sentinel returned when the whole reference
// chain is exhausted without the predicate matching.
var errTraverseRefNoMatch = errors.New("failed to find reference matching given predicate")

// TraverseReferenceChain resolves a chain of references, calling the given
// predicate on each one, and returning the first one for which the predicate
// returns true. This method will return an error if it reaches the end of the
// chain and the predicate still has not returned true.
//
// If a reference name is encountered which does not actually exist, then it is
// assumed to be a hash reference to the zero hash.
func (proj *Project) TraverseReferenceChain(refName plumbing.ReferenceName, pred func(*plumbing.Reference) bool) (*plumbing.Reference, error) {
	// TODO infinite loop checking
	// TODO check that this (and the methods which use it) are actually useful
	for {
		ref, err := proj.GitRepo.Storer.Reference(refName)
		if errors.Is(err, plumbing.ErrReferenceNotFound) {
			// missing references are treated as zero-hash references.
			ref = plumbing.NewHashReference(refName, plumbing.ZeroHash)
		} else if err != nil {
			return nil, fmt.Errorf("resolving reference %q: %w", refName, err)
		}

		if pred(ref) {
			return ref, nil
		} else if ref.Type() != plumbing.SymbolicReference {
			// a non-symbolic ref ends the chain; nothing matched.
			return nil, errTraverseRefNoMatch
		}
		refName = ref.Target()
	}
}
// ErrNoBranchReference is returned from ReferenceToBranchName if no reference
// in the reference chain is for a branch.
var ErrNoBranchReference = errors.New("no branch reference found")

// ReferenceToBranchName traverses a chain of references looking for the first
// branch reference, and returns that name, or returns ErrNoBranchReference if
// no branch reference is part of the chain.
func (proj *Project) ReferenceToBranchName(refName plumbing.ReferenceName) (plumbing.ReferenceName, error) {
	// first check if the given refName is a branch, if so just return that.
	if refName.IsBranch() {
		return refName, nil
	}

	// otherwise walk the chain until some reference targets a branch.
	ref, err := proj.TraverseReferenceChain(refName, func(ref *plumbing.Reference) bool {
		return ref.Target().IsBranch()
	})
	if errors.Is(err, errTraverseRefNoMatch) {
		return "", ErrNoBranchReference
	} else if err != nil {
		return "", fmt.Errorf("traversing reference chain: %w", err)
	}
	return ref.Target(), nil
}
// ReferenceToHash fully resolves a reference to a hash. If a reference cannot
// be resolved then plumbing.ZeroHash is returned.
func (proj *Project) ReferenceToHash(refName plumbing.ReferenceName) (plumbing.Hash, error) {
	ref, err := proj.TraverseReferenceChain(refName, func(ref *plumbing.Reference) bool {
		return ref.Type() == plumbing.HashReference
	})
	if errors.Is(err, errTraverseRefNoMatch) {
		// every chain should terminate in a hash reference (missing refs are
		// treated as zero-hash ones by TraverseReferenceChain).
		return plumbing.ZeroHash, errors.New("no hash in reference chain (is this even possible???)")
	} else if errors.Is(err, plumbing.ErrReferenceNotFound) {
		return plumbing.ZeroHash, nil
	} else if err != nil {
		return plumbing.ZeroHash, fmt.Errorf("traversing reference chain: %w", err)
	}
	return ref.Hash(), nil
}
// headFS returns an FS based on the HEAD commit, or if there is no HEAD commit
// (it's an empty repo) an FS based on the raw filesystem.
func (proj *Project) headFS() (fs.FS, error) {
	head, err := proj.GetHeadCommit()
	if errors.Is(err, ErrHeadIsZero) {
		// empty repo: fall back to the worktree filesystem.
		bfs, err := proj.billyFilesystem()
		if err != nil {
			return nil, fmt.Errorf("getting underlying filesystem: %w", err)
		}
		return fs.FromBillyFilesystem(bfs), nil
	} else if err != nil {
		return nil, fmt.Errorf("could not get HEAD tree: %w", err)
	}
	return fs.FromTree(head.TreeObject), nil
}

View File

@ -1,289 +0,0 @@
package dehub
import (
"bytes"
"errors"
"io"
"math/rand"
"path/filepath"
"testing"
"dehub.dev/src/dehub.git/sigcred"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
yaml "gopkg.in/yaml.v2"
)
// harness bundles the pieces every test needs: the testing.T, a deterministic
// RNG, an in-memory project, and the config being staged into it.
type harness struct {
	t    *testing.T
	rand *rand.Rand
	proj *Project
	cfg  *Config
}

// newHarness constructs a harness backed by an in-memory project and a fixed
// RNG seed, so tests are deterministic.
func newHarness(t *testing.T) *harness {
	rng := rand.New(rand.NewSource(0xb4eadb01))
	return &harness{
		t:    t,
		rand: rng,
		proj: InitMemProject(),
		cfg:  new(Config),
	}
}
// stage writes the given path->content pairs into the worktree and adds them
// to the git index. An empty content string instead removes that path from the
// worktree/index.
func (h *harness) stage(tree map[string]string) {
	w, err := h.proj.GitRepo.Worktree()
	if err != nil {
		h.t.Fatal(err)
	}

	fs := w.Filesystem
	for path, content := range tree {
		if content == "" {
			// empty content means "delete this path".
			if _, err := w.Remove(path); err != nil {
				h.t.Fatalf("removing %q: %v", path, err)
			}
			continue
		}
		dir := filepath.Dir(path)
		// 0755 rather than the previous 0666: directories need the execute bit
		// to be traversable. The old mode only worked because memfs ignores
		// permissions, and would break against a real filesystem.
		if err := fs.MkdirAll(dir, 0755); err != nil {
			h.t.Fatalf("making directory %q: %v", dir, err)
		}

		f, err := fs.Create(path)
		if err != nil {
			h.t.Fatalf("creating file %q: %v", path, err)
		} else if _, err := io.Copy(f, bytes.NewBufferString(content)); err != nil {
			h.t.Fatalf("writing to file %q: %v", path, err)
		} else if err := f.Close(); err != nil {
			h.t.Fatalf("closing file %q: %v", path, err)
		} else if _, err := w.Add(path); err != nil {
			h.t.Fatalf("adding file %q to index: %v", path, err)
		}
	}
}
// stageCfg serializes the harness' current Config to YAML and stages it at
// ConfigPath.
func (h *harness) stageCfg() {
	body, err := yaml.Marshal(h.cfg)
	if err != nil {
		h.t.Fatal(err)
	}
	h.stage(map[string]string{ConfigPath: string(body)})
}
// stageNewAccount generates a test PGP signifier for accountID. If anon is
// false the account (with its public key) is added to the config and the
// config is staged; if anon is true the config is left untouched, producing an
// "anonymous" signifier.
func (h *harness) stageNewAccount(accountID string, anon bool) sigcred.Signifier {
	sig, pubKeyBody := sigcred.TestSignifierPGP(accountID, anon, h.rand)
	if !anon {
		h.cfg.Accounts = append(h.cfg.Accounts, Account{
			ID: accountID,
			Signifiers: []sigcred.SignifierUnion{{PGPPublicKey: &sigcred.SignifierPGP{
				Body: string(pubKeyBody),
			}}},
		})
		h.stageCfg()
	}
	return sig
}
// stageAccessControls parses the given YAML into the config's AccessControls
// and stages the resulting config.
func (h *harness) stageAccessControls(aclYAML string) {
	if err := yaml.Unmarshal([]byte(aclYAML), &h.cfg.AccessControls); err != nil {
		h.t.Fatal(err)
	}
	h.stageCfg()
}
// checkout switches the worktree to the given branch, creating the branch at
// the current HEAD if it doesn't exist yet. On an empty repo (HEAD resolves to
// the zero hash) it just repoints HEAD, since Checkout can't run.
func (h *harness) checkout(branch plumbing.ReferenceName) {
	w, err := h.proj.GitRepo.Worktree()
	if err != nil {
		h.t.Fatal(err)
	}

	head, err := h.proj.GetHeadCommit()
	if errors.Is(err, ErrHeadIsZero) {
		// if HEAD is not resolvable to any hash than the Checkout method
		// doesn't work, just set HEAD manually.
		ref := plumbing.NewSymbolicReference(plumbing.HEAD, branch)
		if err := h.proj.GitRepo.Storer.SetReference(ref); err != nil {
			h.t.Fatal(err)
		}
		return
	} else if err != nil {
		h.t.Fatal(err)
	}

	_, err = h.proj.GitRepo.Storer.Reference(branch)
	if errors.Is(err, plumbing.ErrReferenceNotFound) {
		// branch doesn't exist yet; create it at HEAD.
		err = w.Checkout(&git.CheckoutOptions{
			Hash:   head.Hash,
			Branch: branch,
			Create: true,
		})
	} else if err != nil {
		// BUG FIX: the original formatted the branch name into the %v verb and
		// dropped the actual error, hiding the failure cause.
		h.t.Fatalf("checking if branch %q already exists: %v", branch, err)
	} else {
		err = w.Checkout(&git.CheckoutOptions{
			Branch: branch,
		})
	}
	if err != nil {
		h.t.Fatalf("checking out branch: %v", err)
	}
}
// reset performs a git reset of the worktree to the given hash using the given
// mode, failing the test on error.
func (h *harness) reset(to plumbing.Hash, mode git.ResetMode) {
	worktree, err := h.proj.GitRepo.Worktree()
	if err != nil {
		h.t.Fatal(err)
	}
	resetErr := worktree.Reset(&git.ResetOptions{
		Commit: to,
		Mode:   mode,
	})
	if resetErr != nil {
		h.t.Fatal(resetErr)
	}
}
// verifyExpectation describes what tryCommit should expect from verification.
type verifyExpectation int

const (
	// verifyShouldSucceed: verification must pass.
	verifyShouldSucceed verifyExpectation = 1
	// verifyShouldFail: verification must fail (the commit is then rolled back).
	verifyShouldFail verifyExpectation = 0
	// verifySkip: commit without running verification at all.
	verifySkip verifyExpectation = -1
)
// tryCommit accredits (if accountSig is non-nil) and commits the given
// payload, then verifies the commit against the checked-out branch according
// to verifyExp. When verification fails as expected, the commit is rolled back
// via a hard reset to its parent. The resulting Commit is returned in all
// non-fatal paths.
func (h *harness) tryCommit(
	verifyExp verifyExpectation,
	payUn PayloadUnion,
	accountSig sigcred.Signifier,
) Commit {
	if accountSig != nil {
		var err error
		if payUn, err = h.proj.AccreditPayload(payUn, accountSig); err != nil {
			h.t.Fatalf("accrediting payload: %v", err)
		}
	}

	commit, err := h.proj.Commit(payUn)
	if err != nil {
		h.t.Fatalf("committing PayloadChange: %v", err)
	} else if verifyExp == verifySkip {
		return commit
	}

	// verify against whatever branch HEAD currently points at.
	branch, err := h.proj.ReferenceToBranchName(plumbing.HEAD)
	if err != nil {
		h.t.Fatalf("determining checked out branch: %v", err)
	}

	shouldSucceed := verifyExp > 0
	err = h.proj.VerifyCommits(branch, []Commit{commit})
	if shouldSucceed && err != nil {
		h.t.Fatalf("verifying commit %q: %v", commit.Hash, err)
	} else if shouldSucceed {
		return commit
	} else if !shouldSucceed && err == nil {
		h.t.Fatalf("verifying commit %q should have failed", commit.Hash)
	}

	// expected failure: roll the branch back to the commit's parent (or the
	// zero hash if it has none) so the repo is left clean.
	var parentHash plumbing.Hash
	if commit.Object.NumParents() > 0 {
		parentHash = commit.Object.ParentHashes[0]
	}
	h.reset(parentHash, git.HardReset)
	return commit
}
// assertCommitChange builds a change payload with the given message and runs
// it through tryCommit with the given expectation and signifier.
func (h *harness) assertCommitChange(
	verifyExp verifyExpectation,
	msg string,
	sig sigcred.Signifier,
) Commit {
	payload, err := h.proj.NewPayloadChange(msg)
	if err != nil {
		h.t.Fatalf("creating PayloadChange: %v", err)
	}
	return h.tryCommit(verifyExp, payload, sig)
}
// TestHasStagedChanges checks that HasStagedChanges flips correctly as files
// are staged, committed, and deleted.
func TestHasStagedChanges(t *testing.T) {
	h := newHarness(t)
	rootSig := h.stageNewAccount("root", false)

	assertHasStaged := func(expHasStaged bool) {
		hasStaged, err := h.proj.HasStagedChanges()
		if err != nil {
			t.Fatalf("error calling HasStagedChanges: %v", err)
		} else if hasStaged != expHasStaged {
			t.Fatalf("expected HasStagedChanges to return %v", expHasStaged)
		}
	}

	// the harness starts with some staged changes
	assertHasStaged(true)
	h.stage(map[string]string{"foo": "bar"})
	assertHasStaged(true)
	h.assertCommitChange(verifyShouldSucceed, "first commit", rootSig)
	assertHasStaged(false)

	h.stage(map[string]string{"foo": ""}) // delete foo
	assertHasStaged(true)
	h.assertCommitChange(verifyShouldSucceed, "second commit", rootSig)
	assertHasStaged(false)
}
// TestThisProjectStillVerifies opens this actual project and ensures that all
// commits in it still verify.
func TestThisProjectStillVerifies(t *testing.T) {
	proj, err := OpenProject(".")
	if err != nil {
		t.Fatalf("error opening repo: %v", err)
	}

	headCommit, err := proj.GetHeadCommit()
	if err != nil {
		t.Fatalf("getting repo head: %v", err)
	}

	// ZeroHash as the start means "from the very first commit".
	allCommits, err := proj.GetCommitRange(plumbing.ZeroHash, headCommit.Hash)
	if err != nil {
		t.Fatalf("getting all commits (up to %q): %v", headCommit.Hash, err)
	}

	checkedOutBranch, err := proj.ReferenceToBranchName(plumbing.HEAD)
	if err != nil {
		t.Fatalf("error determining checked out branch: %v", err)
	}

	if err := proj.VerifyCommits(checkedOutBranch, allCommits); err != nil {
		t.Fatal(err)
	}
}
// TestShortHashResolving checks that every prefix of a commit hash (length 2
// and up) resolves back to that commit via GetCommitByRevision.
func TestShortHashResolving(t *testing.T) {
	// TODO ideally this test would test that conflicting hashes are noticed,
	// but that's hard...
	h := newHarness(t)
	rootSig := h.stageNewAccount("root", false)
	hash := h.assertCommitChange(verifyShouldSucceed, "first commit", rootSig).Hash

	hashStr := hash.String()
	t.Log(hashStr)
	for i := 2; i < len(hashStr); i++ {
		gotCommit, err := h.proj.GetCommitByRevision(plumbing.Revision(hashStr[:i]))
		if err != nil {
			t.Fatalf("resolving %q: %v", hashStr[:i], err)
		} else if gotCommit.Hash != hash {
			// BUG FIX: the original had the expected and actual values swapped
			// in the failure message ("expected <got> but got <expected>").
			t.Fatalf("expected hash %q but got %q",
				hash, gotCommit.Hash)
		}
	}
}

View File

@ -1,77 +0,0 @@
package sigcred
import (
"fmt"
"dehub.dev/src/dehub.git/typeobj"
)
// CredentialUnion represents a credential, signifying a user's approval of a
// payload. Exactly one field tagged with "type" should be set.
type CredentialUnion struct {
	PGPSignature *CredentialPGPSignature `type:"pgp_signature"`

	// AccountID specifies the account which generated this CredentialUnion.
	//
	// NOTE that credentials produced by the direct implementations of Signifier
	// won't fill in this field, unless specifically documented. The Signifier
	// produced by the Signifier() method of SignifierUnion _will_ fill this
	// field in, however.
	AccountID string `yaml:"account,omitempty"`

	// AnonID specifies an identifier for the anonymous user which produced this
	// credential. This field is mutually exclusive with AccountID, and won't be
	// set by any Signifier implementation unless specifically documented.
	// (The `yaml:"-"` tag means it is never serialized.)
	AnonID string `yaml:"-"`
}
// MarshalYAML implements the yaml.Marshaler interface. It delegates to typeobj
// so the union is encoded according to the field's "type" tag.
func (c CredentialUnion) MarshalYAML() (interface{}, error) {
	return typeobj.MarshalYAML(c)
}

// UnmarshalYAML implements the yaml.Unmarshaler interface. It delegates to
// typeobj so the correct union member is populated from the "type" tag.
func (c *CredentialUnion) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return typeobj.UnmarshalYAML(c, unmarshal)
}
// ErrNotSelfVerifying is returned from the SelfVerify method of CredentialUnion
// when the credential does not implement the SelfVerifyingCredential interface.
// It may also be returned from the SelfVerify method of the
// SelfVerifyingCredential itself, if the credential can only self-verify under
// certain circumstances.
type ErrNotSelfVerifying struct {
	// Subject is a descriptor of the value which could not be verified. It may
	// be a type name or some other identifying piece of information.
	Subject string
}

// Error implements the error interface, e.g. "<subject> cannot verify itself".
func (e ErrNotSelfVerifying) Error() string {
	return fmt.Sprintf("%s cannot verify itself", e.Subject)
}
// SelfVerify will attempt to cast the credential as a SelfVerifyingCredential,
// and returns the result of the SelfVerify method being called on it.
func (c CredentialUnion) SelfVerify(data []byte) error {
	el, _, err := typeobj.Element(c)
	if err != nil {
		return err
	}

	selfVerifying, ok := el.(SelfVerifyingCredential)
	if !ok {
		return ErrNotSelfVerifying{Subject: fmt.Sprintf("credential of type %T", el)}
	}

	if err := selfVerifying.SelfVerify(data); err != nil {
		return fmt.Errorf("self-verifying credential of type %T: %w", el, err)
	}
	return nil
}
// SelfVerifyingCredential is one which is able to prove its own authenticity by
// some means or another. It is not required for a Credential to implement this
// interface.
type SelfVerifyingCredential interface {
	// SelfVerify should return nil if the Credential has successfully verified
	// that it has accredited the given data, or an error describing why it
	// could not do so. It may return ErrNotSelfVerifying if the Credential can
	// only self-verify under certain circumstances, and those circumstances are
	// not met.
	SelfVerify(data []byte) error
}

View File

@ -1,58 +0,0 @@
package sigcred
import (
"errors"
"math/rand"
"testing"
"time"
)
// TestSelfVerifyingCredentials asserts that CredentialUnion.SelfVerify only
// succeeds for credentials which embed their own public key, and that the
// failure is reported as ErrNotSelfVerifying otherwise.
func TestSelfVerifyingCredentials(t *testing.T) {
	// The seed is logged so a failing run can be reproduced.
	seed := time.Now().UnixNano()
	t.Logf("seed: %d", seed)
	rand := rand.New(rand.NewSource(seed))
	tests := []struct {
		descr  string
		mkCred func(toSign []byte) (CredentialUnion, error)
		expErr bool
	}{
		{
			descr: "pgp sig no body",
			mkCred: func(toSign []byte) (CredentialUnion, error) {
				// anon=false: the produced credential carries no pub_key_body,
				// so it cannot self-verify.
				privKey, _ := TestSignifierPGP("", false, rand)
				return privKey.Sign(nil, toSign)
			},
			expErr: true,
		},
		{
			descr: "pgp sig with body",
			mkCred: func(toSign []byte) (CredentialUnion, error) {
				// anon=true: the credential embeds the public key body.
				privKey, _ := TestSignifierPGP("", true, rand)
				return privKey.Sign(nil, toSign)
			},
		},
	}
	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			// sign a random blob of up to 1KiB.
			data := make([]byte, rand.Intn(1024))
			if _, err := rand.Read(data); err != nil {
				t.Fatal(err)
			}
			cred, err := test.mkCred(data)
			if err != nil {
				t.Fatal(err)
			}
			err = cred.SelfVerify(data)
			isNotSelfVerifying := errors.As(err, new(ErrNotSelfVerifying))
			if test.expErr && !isNotSelfVerifying {
				t.Fatalf("expected ErrNotSelfVerifying but got: %v", err)
			} else if !test.expErr && err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
		})
	}
}

View File

@ -1,320 +0,0 @@
package sigcred
import (
"bytes"
"crypto"
"crypto/sha256"
"errors"
"fmt"
"io"
"io/ioutil"
"os/exec"
"path/filepath"
"strings"
"dehub.dev/src/dehub.git/fs"
"dehub.dev/src/dehub.git/yamlutil"
"golang.org/x/crypto/openpgp"
"golang.org/x/crypto/openpgp/armor"
"golang.org/x/crypto/openpgp/packet"
)
// CredentialPGPSignature describes a PGP signature which has been used to sign
// a commit.
type CredentialPGPSignature struct {
	// PubKeyID is the hex ID of the public key which produced Body.
	PubKeyID string `yaml:"pub_key_id"`
	// PubKeyBody, if set, is an armored export of the public key itself,
	// which allows the signature to be verified without an external key store.
	PubKeyBody string `yaml:"pub_key_body,omitempty"`
	// Body is the detached signature, base64-encoded in YAML via yamlutil.Blob.
	Body yamlutil.Blob `yaml:"body"`
}
// SelfVerify will only work if PubKeyBody is filled in. If so, Body will
// attempt to be verified by that public key.
func (c *CredentialPGPSignature) SelfVerify(data []byte) error {
	if c.PubKeyBody == "" {
		// without the embedded key there is nothing to verify against.
		return ErrNotSelfVerifying{
			Subject: "PGP signature Credential with no pub_key_body field",
		}
	}
	// delegate to SignifierPGP loaded from the embedded key; the nil FS is
	// fine since Body (not Path) is set.
	sig := SignifierPGP{Body: c.PubKeyBody}
	return sig.Verify(nil, data, CredentialUnion{PGPSignature: c})
}
// pgpKey wraps an openpgp entity (a key pair, possibly public-only) and
// provides in-process Sign/Signed/Verify methods over it.
type pgpKey struct {
	entity *openpgp.Entity
}
// newPGPPubKey reads an armored PGP public key from r and wraps it in a
// pgpKey.
func newPGPPubKey(r io.Reader) (pgpKey, error) {
	// TODO support non-armored keys as well
	armorBlock, err := armor.Decode(r)
	if err != nil {
		return pgpKey{}, fmt.Errorf("could not decode armored PGP public key: %w", err)
	}
	ent, err := openpgp.ReadEntity(packet.NewReader(armorBlock.Body))
	if err != nil {
		return pgpKey{}, fmt.Errorf("could not read PGP public key: %w", err)
	}
	return pgpKey{entity: ent}, nil
}
// Sign produces a PGP signature credential over data using this key's private
// key. The FS argument is unused.
func (s pgpKey) Sign(_ fs.FS, data []byte) (CredentialUnion, error) {
	if s.entity.PrivateKey == nil {
		return CredentialUnion{}, errors.New("private key not loaded")
	}
	// the openpgp package expects the caller to hash the data themselves;
	// SHA-256 is recorded on the signature packet so verifiers use the same
	// hash.
	h := sha256.New()
	h.Write(data)
	var sig packet.Signature
	sig.Hash = crypto.SHA256
	sig.PubKeyAlgo = s.entity.PrimaryKey.PubKeyAlgo
	if err := sig.Sign(h, s.entity.PrivateKey, nil); err != nil {
		return CredentialUnion{}, fmt.Errorf("signing data: %w", err)
	}
	body := new(bytes.Buffer)
	if err := sig.Serialize(body); err != nil {
		return CredentialUnion{}, fmt.Errorf("serializing signature: %w", err)
	}
	// PubKeyBody is deliberately left empty here; anonPGPSignifier fills it in
	// for the anonymous use-case.
	return CredentialUnion{
		PGPSignature: &CredentialPGPSignature{
			PubKeyID: s.entity.PrimaryKey.KeyIdString(),
			Body:     body.Bytes(),
		},
	}, nil
}
// Signed reports whether the credential carries a PGP signature whose public
// key ID matches this key's primary key.
func (s pgpKey) Signed(_ fs.FS, cred CredentialUnion) (bool, error) {
	sig := cred.PGPSignature
	return sig != nil && sig.PubKeyID == s.entity.PrimaryKey.KeyIdString(), nil
}
// Verify asserts that the credential contains a PGP signature over data which
// was produced by this key's primary public key.
func (s pgpKey) Verify(_ fs.FS, data []byte, cred CredentialUnion) error {
	credSig := cred.PGPSignature
	if credSig == nil {
		// this message previously referenced SignifierPGPFile, a copy-paste
		// artifact; it is the in-memory pgp key doing the verification.
		return fmt.Errorf("pgp key cannot verify credential %+v", cred)
	}
	pkt, err := packet.Read(bytes.NewBuffer(credSig.Body))
	if err != nil {
		return fmt.Errorf("could not read signature packet: %w", err)
	}
	sigPkt, ok := pkt.(*packet.Signature)
	if !ok {
		return fmt.Errorf("signature bytes were parsed as a %T, not a signature", pkt)
	}
	// The gpg process which is invoked during normal signing automatically
	// hashes whatever is piped to it. The VerifySignature method in the openpgp
	// package expects you to do it yourself.
	h := sigPkt.Hash.New()
	h.Write(data)
	return s.entity.PrimaryKey.VerifySignature(h, sigPkt)
}
// MarshalBinary returns an armored encoding of this key's public portion.
func (s pgpKey) MarshalBinary() ([]byte, error) {
	body := new(bytes.Buffer)
	// NOTE(review): gpg itself armors public keys under the block type
	// "PGP PUBLIC KEY BLOCK"; "PGP PUBLIC KEY" round-trips through
	// newPGPPubKey here, but may not interoperate with other tools — confirm
	// before relying on external consumption of this output.
	armorEncoder, err := armor.Encode(body, "PGP PUBLIC KEY", nil)
	if err != nil {
		return nil, fmt.Errorf("initializing armor encoder: %w", err)
	} else if err := s.entity.Serialize(armorEncoder); err != nil {
		return nil, fmt.Errorf("encoding public key: %w", err)
	} else if err := armorEncoder.Close(); err != nil {
		return nil, fmt.Errorf("closing armor encoder: %w", err)
	}
	return body.Bytes(), nil
}
// userID returns the single identity attached to this key, erroring if the
// key has zero identities or more than one.
func (s pgpKey) userID() (*packet.UserId, error) {
	switch len(s.entity.Identities) {
	case 0:
		return nil, errors.New("pgp key has no identity information")
	case 1:
		// Identities is a map with exactly one entry; return it.
		for _, identity := range s.entity.Identities {
			return identity.UserId, nil
		}
	}
	return nil, errors.New("multiple identities on a single pgp key is unsupported")
}
// anonPGPSignifier wraps the given Signifier so that every credential it
// produces also carries the full armored public key body and an AnonID of the
// form produced by fmt.Sprintf("%s %q", keyID, email), allowing verification
// without an account lookup.
func anonPGPSignifier(pgpKey pgpKey, sig Signifier) (Signifier, error) {
	keyID := pgpKey.entity.PrimaryKey.KeyIdString()
	userID, err := pgpKey.userID()
	if err != nil {
		return nil, err
	}
	pubKeyBody, err := pgpKey.MarshalBinary()
	if err != nil {
		return nil, err
	}
	return signifierMiddleware{
		Signifier: sig,
		// NOTE(review): this callback assumes every credential produced by sig
		// has a non-nil PGPSignature; it would panic otherwise — confirm sig
		// is always PGP-backed.
		signCallback: func(cred *CredentialUnion) {
			cred.PGPSignature.PubKeyBody = string(pubKeyBody)
			cred.AnonID = fmt.Sprintf("%s %q", keyID, userID.Email)
		},
	}, nil
}
// TestSignifierPGP returns a direct implementation of Signifier which uses a
// random private key generated in memory, as well as an armored version of its
// public key.
//
// NOTE that the key returned is very weak, and should only be used for tests.
func TestSignifierPGP(name string, anon bool, randReader io.Reader) (Signifier, []byte) {
	entity, err := openpgp.NewEntity(name, "", name+"@example.com", &packet.Config{
		Rand: randReader,
		// 512-bit RSA is insecure but fast; fine for test keys only.
		RSABits: 512,
	})
	if err != nil {
		panic(err)
	}
	pgpKey := pgpKey{entity: entity}
	pubKeyBody, err := pgpKey.MarshalBinary()
	if err != nil {
		panic(err)
	}
	if anon {
		// anonymous signifiers embed the public key body and an anon ID into
		// every credential they produce.
		sigInt, err := anonPGPSignifier(pgpKey, pgpKey)
		if err != nil {
			panic(err)
		}
		return sigInt, pubKeyBody
	}
	return accountSignifier(name, pgpKey), pubKeyBody
}
// SignifierPGP describes a pgp public key whose corresponding private key will
// be used as a signing key. The public key can be described by one of multiple
// fields, each being a different method of loading the public key. Only one
// field should be set.
type SignifierPGP struct {
	// An armored string encoding of the public key, as exported via
	// `gpg -a --export <key-id>`
	Body string `yaml:"body,omitempty"`
	// Path, relative to the root of the repo, of the armored public key file.
	Path string `yaml:"path,omitempty"`
}
// compile-time assertion that SignifierPGP implements Signifier.
var _ Signifier = SignifierPGP{}
// cmdGPG invokes the local gpg executable with the given arguments, writing
// stdin to its standard input, and returns its standard output. gpg's stderr
// is captured and included in any returned error.
func cmdGPG(stdin []byte, args ...string) ([]byte, error) {
	args = append([]string{"--openpgp"}, args...)
	stderr := new(bytes.Buffer)
	cmd := exec.Command("gpg", args...)
	cmd.Stdin = bytes.NewBuffer(stdin)
	cmd.Stderr = stderr
	out, err := cmd.Output()
	if err != nil {
		// wrap with %w (rather than %v) so callers can unwrap the underlying
		// exec error; gpg's stderr carries the useful diagnostics.
		return nil, fmt.Errorf("calling gpg command: %w (stderr: %q)", err, stderr.String())
	}
	return out, nil
}
// LoadSignifierPGP loads a pgp key using the given identifier. The key is
// assumed to be stored in the client's keyring already.
//
// If this is being called for an anonymous user to use, then anon can be set to
// true. This will have the effect of setting the PubKeyBody and AnonID of all
// produced credentials.
func LoadSignifierPGP(keyID string, anon bool) (Signifier, error) {
	// export the armored public key from the local gpg keyring.
	pubKey, err := cmdGPG(nil, "-a", "--export", keyID)
	if err != nil {
		return nil, fmt.Errorf("loading public key: %w", err)
	} else if len(pubKey) == 0 {
		// presumably gpg can exit zero while producing no output for an
		// unknown key — hence the explicit empty check. TODO confirm.
		return nil, fmt.Errorf("no public key found for %q", keyID)
	}
	sig := &SignifierPGP{Body: string(pubKey)}
	if !anon {
		return sig, nil
	}
	// nil FS is fine here since Body (not Path) is set on sig.
	pgpKey, err := sig.load(nil)
	if err != nil {
		return nil, err
	}
	return anonPGPSignifier(pgpKey, sig)
}
// load resolves this Signifier's configuration into a parsed pgpKey. If Body
// is set it is parsed directly; otherwise the armored key is read from Path
// within the given FS.
func (s SignifierPGP) load(fs fs.FS) (pgpKey, error) {
	if s.Body != "" {
		return newPGPPubKey(strings.NewReader(s.Body))
	}
	path := filepath.Clean(s.Path)
	fr, err := fs.Open(path)
	if err != nil {
		return pgpKey{}, fmt.Errorf("opening PGP public key file at %q: %w", path, err)
	}
	defer fr.Close()
	pubKeyB, err := ioutil.ReadAll(fr)
	if err != nil {
		return pgpKey{}, fmt.Errorf("reading PGP public key from file at %q: %w", s.Path, err)
	}
	// Guard against an empty key file, and parse the contents directly rather
	// than recursing via SignifierPGP{Body: ...}.load; recursing would loop
	// forever on an empty file, since Body would still be "".
	if len(pubKeyB) == 0 {
		return pgpKey{}, fmt.Errorf("PGP public key file at %q is empty", s.Path)
	}
	return newPGPPubKey(strings.NewReader(string(pubKeyB)))
}
// Sign will sign the given arbitrary bytes using the private key corresponding
// to the pgp public key embedded in this Signifier. The actual signing is
// delegated to the local gpg executable, selected by the loaded key's ID.
func (s SignifierPGP) Sign(fs fs.FS, data []byte) (CredentialUnion, error) {
	sigPGP, err := s.load(fs)
	if err != nil {
		return CredentialUnion{}, err
	}
	keyID := sigPGP.entity.PrimaryKey.KeyIdString()
	sig, err := cmdGPG(data, "--detach-sign", "--local-user", keyID)
	if err != nil {
		return CredentialUnion{}, fmt.Errorf("signing with pgp key: %w", err)
	}
	return CredentialUnion{
		PGPSignature: &CredentialPGPSignature{
			PubKeyID: keyID,
			Body:     sig,
		},
	}, nil
}
// Signed returns true if the private key corresponding to the pgp public key
// embedded in this Signifier was used to produce the given Credential.
func (s SignifierPGP) Signed(fs fs.FS, cred CredentialUnion) (bool, error) {
	key, err := s.load(fs)
	if err != nil {
		return false, err
	}
	return key.Signed(fs, cred)
}
// Verify asserts that the given signature was produced by this key signing the
// given piece of data.
func (s SignifierPGP) Verify(fs fs.FS, data []byte, cred CredentialUnion) error {
	key, err := s.load(fs)
	if err != nil {
		return err
	}
	return key.Verify(fs, data, cred)
}
// SignifierPGPFile is a legacy form of SignifierPGP.
//
// Deprecated: use the Path field of SignifierPGP instead.
type SignifierPGPFile struct {
	// Path, relative to the root of the repo, of the armored public key file.
	Path string `yaml:"path"`
}

View File

@ -1,66 +0,0 @@
package sigcred
import (
"math/rand"
"testing"
"time"
"dehub.dev/src/dehub.git/fs"
)
// There are not currently tests for testing pgp signature creation, as they
// require calls out to the gpg executable. Wrapping tests in docker containers
// would make this doable.
// TestPGPVerification asserts that a credential signed by an in-memory pgp key
// verifies against a SignifierPGP loaded from either an inline Body or a key
// file within an FS.
func TestPGPVerification(t *testing.T) {
	tests := []struct {
		descr string
		init  func(pubKeyBody []byte) (Signifier, fs.FS)
	}{
		{
			descr: "SignifierPGP Body",
			init: func(pubKeyBody []byte) (Signifier, fs.FS) {
				return SignifierPGP{Body: string(pubKeyBody)}, nil
			},
		},
		{
			descr: "SignifierPGP Path",
			init: func(pubKeyBody []byte) (Signifier, fs.FS) {
				// store the key in a stub FS to exercise the Path branch of
				// SignifierPGP.load.
				pubKeyPath := "some/dir/pubkey.asc"
				fs := fs.Stub{pubKeyPath: pubKeyBody}
				return SignifierPGP{Path: pubKeyPath}, fs
			},
		},
	}
	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			// the seed is logged so a failing run can be reproduced.
			seed := time.Now().UnixNano()
			t.Logf("seed: %d", seed)
			rand := rand.New(rand.NewSource(seed))
			privKey, pubKeyBody := TestSignifierPGP("", false, rand)
			sig, fs := test.init(pubKeyBody)
			data := make([]byte, rand.Intn(1024))
			if _, err := rand.Read(data); err != nil {
				t.Fatal(err)
			}
			cred, err := privKey.Sign(nil, data)
			if err != nil {
				t.Fatal(err)
			}
			signed, err := sig.Signed(fs, cred)
			if err != nil {
				t.Fatal(err)
			} else if !signed {
				t.Fatal("expected signed to be true")
			}
			if err := sig.Verify(fs, data, cred); err != nil {
				t.Fatal(err)
			}
		})
	}
}

View File

@ -1,5 +0,0 @@
// Package sigcred implements the Signifier and Credential types, which
// interplay together to provide the ability to sign arbitrary blobs of data
// (producing Credentials) and to verify those Credentials within the context of
// a dehub repo.
package sigcred

View File

@ -1,95 +0,0 @@
package sigcred
import (
"dehub.dev/src/dehub.git/fs"
"dehub.dev/src/dehub.git/typeobj"
)
// Signifier describes the methods that all signifiers must implement.
type Signifier interface {
	// Sign returns a credential containing a signature of the given data.
	//
	// The FS can be used to find the Signifier's backing data (e.g. a public
	// key file) at a particular snapshot.
	Sign(fs.FS, []byte) (CredentialUnion, error)
	// Signed returns true if the Signifier was used to sign the credential.
	Signed(fs.FS, CredentialUnion) (bool, error)
	// Verify asserts that the Signifier produced the given credential for the
	// given data set, or returns an error.
	//
	// The FS can be used to find the Signifier's backing data at a particular
	// snapshot.
	Verify(fs.FS, []byte, CredentialUnion) error
}
// SignifierUnion represents a single signifier for an account. Only one field
// should be set on each SignifierUnion.
type SignifierUnion struct {
	// PGPPublicKey is a pgp public key, loaded either inline or from a path.
	PGPPublicKey *SignifierPGP `type:"pgp_public_key"`
	// LegacyPGPPublicKeyFile is deprecated, only PGPPublicKey should be used.
	// It is migrated into PGPPublicKey during UnmarshalYAML.
	LegacyPGPPublicKeyFile *SignifierPGPFile `type:"pgp_public_key_file"`
}
// MarshalYAML implements the yaml.Marshaler interface by delegating to
// typeobj, which inlines a "type" field naming the populated variant.
func (s SignifierUnion) MarshalYAML() (interface{}, error) {
	return typeobj.MarshalYAML(s)
}
// UnmarshalYAML implements the yaml.Unmarshaler interface. It also migrates
// the deprecated pgp_public_key_file form into the PGPPublicKey field.
func (s *SignifierUnion) UnmarshalYAML(unmarshal func(interface{}) error) error {
	if err := typeobj.UnmarshalYAML(s, unmarshal); err != nil {
		return err
	}
	// TODO deprecate PGPPublicKeyFile
	if legacy := s.LegacyPGPPublicKeyFile; legacy != nil {
		s.LegacyPGPPublicKeyFile = nil
		s.PGPPublicKey = &SignifierPGP{Path: legacy.Path}
	}
	return nil
}
// Signifier returns the Signifier instance encapsulated by this SignifierUnion.
//
// This will panic if no Signifier field is populated.
//
// accountID is given so as to automatically fill the AccountID field of
// credentials returned from Sign, since the underlying implementation doesn't
// know what account it's signing for.
func (s SignifierUnion) Signifier(accountID string) Signifier {
	el, _, err := typeobj.Element(s)
	if err != nil {
		// no field was populated; treated as a programmer error.
		panic(err)
	}
	return accountSignifier(accountID, el.(Signifier))
}
// signifierMiddleware wraps an inner Signifier, optionally mutating each
// credential it produces before it is returned from Sign.
type signifierMiddleware struct {
	Signifier
	// signCallback, if non-nil, is called with each successfully produced
	// credential.
	signCallback func(*CredentialUnion)
}
// Sign delegates to the wrapped Signifier, then applies signCallback (if set)
// to the successfully produced credential.
func (sm signifierMiddleware) Sign(fs fs.FS, data []byte) (CredentialUnion, error) {
	cred, err := sm.Signifier.Sign(fs, data)
	if err != nil {
		return cred, err
	}
	if sm.signCallback != nil {
		sm.signCallback(&cred)
	}
	return cred, nil
}
// accountSignifier wraps a Signifier to always set the accountID field on
// credentials it produces via the Sign method.
//
// TODO accountSignifier shouldn't be necessary, it's very ugly. It indicates
// that CredentialUnion probably shouldn't have AccountID on it, which makes
// sense. Some refactoring is required here.
func accountSignifier(accountID string, sig Signifier) Signifier {
	return signifierMiddleware{
		Signifier: sig,
		signCallback: func(cred *CredentialUnion) {
			cred.AccountID = accountID
		},
	}
}

View File

@ -1,221 +0,0 @@
// Package typeobj implements a set of utility functions intended to be used on
// union structs whose fields are tagged with the "type" tag and which expect
// only one of the fields to be set. For example:
//
// type OuterType struct {
// A *InnerTypeA `type:"a"`
// B *InnerTypeB `type:"b"`
// C *InnerTypeC `type:"c"`
// }
//
package typeobj
import (
"errors"
"fmt"
"reflect"
"strings"
)
// tagInfo is the parsed form of a single "type" struct tag.
type tagInfo struct {
	val       string // the tag's primary value (the type name)
	isDefault bool   // true when the tag carried a ",default" option
}
// parseTag splits a raw "type" tag into its value and options.
func parseTag(tag string) tagInfo {
	fields := strings.Split(tag, ",")
	info := tagInfo{val: fields[0]}
	if len(fields) > 1 && fields[1] == "default" {
		info.isDefault = true
	}
	return info
}
// structTypeWithYAMLTags takes a type of kind struct and returns that same
// type, except all fields with a "type" tag will also have a `yaml:"-"` tag
// attached.
func structTypeWithYAMLTags(typ reflect.Type) (reflect.Type, error) {
n := typ.NumField()
outFields := make([]reflect.StructField, n)
for i := 0; i < n; i++ {
field := typ.Field(i)
hasTypeTag := field.Tag.Get("type") != ""
if hasTypeTag && field.Tag.Get("yaml") != "" {
return nil, fmt.Errorf("field %s has yaml tag and type tag", field.Name)
} else if hasTypeTag {
field.Tag += ` yaml:"-"`
}
outFields[i] = field
}
return reflect.StructOf(outFields), nil
}
// findTypeField returns the field of val whose "type" tag matches
// targetTypeTag. When targetTypeTag is empty, the field tagged as the default
// (if any) is returned instead; the last default-tagged field wins.
func findTypeField(val reflect.Value, targetTypeTag string) (reflect.Value, reflect.StructField, error) {
	typ := val.Type()
	var defaultVal reflect.Value
	var defaultField reflect.StructField
	haveDefault := false
	for i := 0; i < val.NumField(); i++ {
		info := parseTag(typ.Field(i).Tag.Get("type"))
		switch {
		case targetTypeTag != "" && info.val == targetTypeTag:
			return val.Field(i), typ.Field(i), nil
		case targetTypeTag == "" && info.isDefault:
			defaultVal, defaultField, haveDefault = val.Field(i), typ.Field(i), true
		}
	}
	if targetTypeTag != "" {
		return reflect.Value{}, reflect.StructField{}, fmt.Errorf("invalid type value %q", targetTypeTag)
	}
	if haveDefault {
		return defaultVal, defaultField, nil
	}
	return reflect.Value{}, reflect.StructField{}, errors.New("type field not set")
}
// UnmarshalYAML is intended to be used within the UnmarshalYAML method of a
// union struct. It will use the given input data's "type" field and match that
// to the struct field tagged with that value. it will then unmarshal the input
// data into that inner field.
func UnmarshalYAML(i interface{}, unmarshal func(interface{}) error) error {
	val := reflect.Indirect(reflect.ValueOf(i))
	if !val.CanSet() || val.Kind() != reflect.Struct {
		return fmt.Errorf("cannot unmarshal into value of type %T: must be a struct pointer", i)
	}
	// create a copy of the struct type, with `yaml:"-"` tags added to all
	// fields with `type:"..."` tags. If we didn't do this then there would be
	// conflicts in the next step if a type field's name was the same as one of
	// its inner field names.
	valTypeCP, err := structTypeWithYAMLTags(val.Type())
	if err != nil {
		return fmt.Errorf("cannot unmarshal into value of type %T: %w", i, err)
	}
	// unmarshal in all non-typeobj fields. construct a type which wraps the
	// given one, hiding its UnmarshalYAML method (if it has one), and unmarshal
	// onto that directly. The "type" field is also unmarshaled at this stage.
	valWrap := reflect.New(reflect.StructOf([]reflect.StructField{
		{Name: "Type", Type: typeOfString, Tag: `yaml:"type"`},
		{Name: "Val", Type: valTypeCP, Tag: `yaml:",inline"`},
	}))
	if err := unmarshal(valWrap.Interface()); err != nil {
		return err
	}
	// set non-type fields into the original value
	valWrapInnerVal := valWrap.Elem().Field(1)
	for i := 0; i < valWrapInnerVal.NumField(); i++ {
		fieldVal, fieldTyp := valWrapInnerVal.Field(i), valTypeCP.Field(i)
		if fieldTyp.Tag.Get("type") != "" {
			continue
		}
		val.Field(i).Set(fieldVal)
	}
	// find the union field matching the unmarshaled "type" value (or the
	// default field if "type" was absent), then unmarshal the document a
	// second time, directly into that field.
	typeVal := valWrap.Elem().Field(0).String()
	fieldVal, fieldTyp, err := findTypeField(val, typeVal)
	if err != nil {
		return err
	}
	var valInto interface{}
	if fieldVal.Kind() == reflect.Ptr {
		// pointer fields must be allocated before unmarshaling into them.
		newFieldVal := reflect.New(fieldTyp.Type.Elem())
		fieldVal.Set(newFieldVal)
		valInto = newFieldVal.Interface()
	} else {
		valInto = fieldVal.Addr().Interface()
	}
	return unmarshal(valInto)
}
// element finds the non-zero field of val (which must be of kind struct)
// tagged with "type", returning that field's value, the tag's type name, and
// the indices of all fields which carry no "type" tag.
//
// NOTE if more than one type-tagged field is non-zero, the last one wins.
func element(val reflect.Value) (reflect.Value, string, []int, error) {
	typ := val.Type()
	numFields := val.NumField()
	var fieldVal reflect.Value
	var typeTag string
	nonTypeFields := make([]int, 0, numFields)
	for i := 0; i < numFields; i++ {
		innerFieldVal := val.Field(i)
		innerTagInfo := parseTag(typ.Field(i).Tag.Get("type"))
		if innerTagInfo.val == "" {
			// untagged fields are collected so MarshalYAML can inline them.
			nonTypeFields = append(nonTypeFields, i)
		} else if innerFieldVal.IsZero() {
			continue
		} else {
			fieldVal = innerFieldVal
			typeTag = innerTagInfo.val
		}
	}
	if !fieldVal.IsValid() {
		return reflect.Value{}, "", nil, errors.New(`no non-zero fields tagged with "type"`)
	}
	return fieldVal, typeTag, nonTypeFields, nil
}
// Element returns the value of the first non-zero field tagged with "type", as
// well as the value of the "type" tag.
func Element(i interface{}) (interface{}, string, error) {
	val := reflect.Indirect(reflect.ValueOf(i))
	fieldVal, tag, _, err := element(val)
	if err != nil {
		// return a nil interface on error rather than boxing the zero
		// reflect.Value, which previously left callers with a non-nil
		// interface{} alongside the error.
		return nil, "", err
	}
	return fieldVal.Interface(), tag, nil
}
var typeOfString = reflect.TypeOf("string")
// MarshalYAML is intended to be used within the MarshalYAML method of a union
// struct. It will find the first field of the given struct which has a "type"
// tag and is non-zero. It will then marshal that field's value, inlining an
// extra YAML field "type" whose value is the value of the "type" tag on the
// struct field, and return that.
func MarshalYAML(i interface{}) (interface{}, error) {
	val := reflect.Indirect(reflect.ValueOf(i))
	typ := val.Type()
	fieldVal, typeTag, nonTypeFields, err := element(val)
	if err != nil {
		return nil, err
	}
	fieldVal = reflect.Indirect(fieldVal)
	if fieldVal.Kind() != reflect.Struct {
		return nil, fmt.Errorf("cannot marshal non-struct type %T", fieldVal.Interface())
	}
	// build an anonymous wrapper struct type of the form:
	//	struct {
	//		Type string  `yaml:"type"`
	//		Val  <elem>  `yaml:",inline"`
	//		...all non-type-tagged fields, verbatim...
	//	}
	structFields := make([]reflect.StructField, 0, len(nonTypeFields)+2)
	structFields = append(structFields,
		reflect.StructField{
			Name: "Type",
			Type: typeOfString,
			Tag:  `yaml:"type"`,
		},
		reflect.StructField{
			Name: "Val",
			Type: fieldVal.Type(),
			Tag:  `yaml:",inline"`,
		},
	)
	nonTypeFieldVals := make([]reflect.Value, len(nonTypeFields))
	for i, fieldIndex := range nonTypeFields {
		fieldVal, fieldType := val.Field(fieldIndex), typ.Field(fieldIndex)
		structFields = append(structFields, fieldType)
		nonTypeFieldVals[i] = fieldVal
	}
	// instantiate the wrapper and copy all values into it; fields after the
	// first two line up with nonTypeFieldVals by construction.
	outVal := reflect.New(reflect.StructOf(structFields))
	outVal.Elem().Field(0).Set(reflect.ValueOf(typeTag))
	outVal.Elem().Field(1).Set(fieldVal)
	for i, fieldVal := range nonTypeFieldVals {
		outVal.Elem().Field(2 + i).Set(fieldVal)
	}
	return outVal.Interface(), nil
}

View File

@ -1,169 +0,0 @@
package typeobj
import (
"reflect"
"strings"
"testing"
"github.com/davecgh/go-spew/spew"
"gopkg.in/yaml.v2"
)
// foo is a test fixture inner type.
type foo struct {
	A int `yaml:"a"`
}
// bar is a test fixture inner type, used via pointer in outer.
type bar struct {
	B int `yaml:"b"`
}
// baz has a field of the same name as the type, which is tricky
type baz struct {
	Baz int `yaml:"baz"`
}
// outer is a union fixture exercising a value variant, a pointer variant, a
// variant sharing its field name, and a plain (non-type-tagged) yaml field.
type outer struct {
	Foo   foo    `type:"foo"`
	Bar   *bar   `type:"bar"`
	Baz   baz    `type:"baz"`
	Other string `yaml:"other_field,omitempty"`
}
// MarshalYAML implements the yaml.Marshaler interface via typeobj.
func (o outer) MarshalYAML() (interface{}, error) {
	return MarshalYAML(o)
}
// UnmarshalYAML implements the yaml.Unmarshaler interface via typeobj.
func (o *outer) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return UnmarshalYAML(o, unmarshal)
}
// outerWDefault is a union fixture whose foo variant is marked as the default
// type.
type outerWDefault struct {
	Foo foo  `type:"foo,default"`
	Bar *bar `type:"bar"`
}
// MarshalYAML implements the yaml.Marshaler interface via typeobj.
func (o outerWDefault) MarshalYAML() (interface{}, error) {
	return MarshalYAML(o)
}
// UnmarshalYAML implements the yaml.Unmarshaler interface via typeobj.
func (o *outerWDefault) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return UnmarshalYAML(o, unmarshal)
}
// TestTypeObj exercises UnmarshalYAML, Element, and MarshalYAML round-trips
// over the fixture union types, including default-variant and error cases.
func TestTypeObj(t *testing.T) {
	type test struct {
		descr         string
		str           string
		expErr        string
		expObj        interface{}
		expTypeTag    string
		expElem       interface{}
		expMarshalOut string // defaults to str
	}
	tests := []test{
		{
			descr:  "no type set",
			str:    `{}`,
			expErr: "type field not set",
			expObj: outer{},
		},
		{
			descr:  "no type set with nontype field",
			str:    `other_field: aaa`,
			expErr: "type field not set",
			expObj: outer{},
		},
		{
			descr:         "no type set with default",
			str:           `a: 1`,
			expObj:        outerWDefault{Foo: foo{A: 1}},
			expTypeTag:    "foo",
			expElem:       foo{A: 1},
			expMarshalOut: "type: foo\na: 1",
		},
		{
			descr:  "invalid type value",
			str:    "type: INVALID",
			expErr: "invalid type value",
			expObj: outer{},
		},
		{
			descr:      "foo set",
			str:        "type: foo\na: 1",
			expObj:     outer{Foo: foo{A: 1}},
			expTypeTag: "foo",
			expElem:    foo{A: 1},
		},
		{
			descr:      "bar set",
			str:        "type: bar\nb: 1",
			expObj:     outer{Bar: &bar{B: 1}},
			expTypeTag: "bar",
			expElem:    &bar{B: 1},
		},
		{
			descr:      "foo and other_field set",
			str:        "type: foo\na: 1\nother_field: aaa",
			expObj:     outer{Foo: foo{A: 1}, Other: "aaa"},
			expTypeTag: "foo",
			expElem:    foo{A: 1},
		},
		{
			descr:      "type is same as field name",
			str:        "type: baz\nbaz: 3",
			expObj:     outer{Baz: baz{Baz: 3}},
			expTypeTag: "baz",
			expElem:    baz{Baz: 3},
		},
	}
	for _, test := range tests {
		t.Run(test.descr, func(t *testing.T) {
			// unmarshal into a fresh value of the same type as expObj.
			intoV := reflect.New(reflect.TypeOf(test.expObj))
			err := yaml.Unmarshal([]byte(test.str), intoV.Interface())
			if test.expErr != "" {
				if err == nil || !strings.HasPrefix(err.Error(), test.expErr) {
					t.Fatalf("expected error %q when unmarshaling but got: %v", test.expErr, err)
				}
				return
			} else if test.expErr == "" && err != nil {
				t.Fatalf("unmarshaling %q returned unexpected error: %v", test.str, err)
			}
			into := intoV.Elem().Interface()
			if !reflect.DeepEqual(into, test.expObj) {
				t.Fatalf("test expected value:\n%s\nbut got value:\n%s", spew.Sprint(test.expObj), spew.Sprint(into))
			}
			// Element should return the populated variant and its type tag.
			elem, typeTag, err := Element(into)
			if err != nil {
				t.Fatalf("error when calling Element(%s): %v", spew.Sprint(into), err)
			} else if !reflect.DeepEqual(elem, test.expElem) {
				t.Fatalf("test expected elem value:\n%s\nbut got value:\n%s", spew.Sprint(test.expElem), spew.Sprint(elem))
			} else if typeTag != test.expTypeTag {
				t.Fatalf("test expected typeTag value %q but got %q", test.expTypeTag, typeTag)
			}
			// marshaling back should reproduce the input (or expMarshalOut).
			expMarshalOut := test.expMarshalOut
			if expMarshalOut == "" {
				expMarshalOut = test.str
			}
			expMarshalOut = strings.TrimSpace(expMarshalOut)
			b, err := yaml.Marshal(into)
			if err != nil {
				t.Fatalf("error marshaling %s: %v", spew.Sprint(into), err)
			}
			marshalOut := strings.TrimSpace(string(b))
			if marshalOut != expMarshalOut {
				t.Fatalf("test expected to marshal to %q, but instead marshaled to %q", expMarshalOut, marshalOut)
			}
		})
	}
}

View File

@ -1,36 +0,0 @@
// Package yamlutil contains utility types which are useful for dealing with the
// yaml package.
package yamlutil
import (
"encoding/base64"
)
// Blob encodes and decodes a byte slice as a standard base-64 encoded yaml
// string.
type Blob []byte
// String returns the base-64 representation of the blob, matching its yaml
// marshaled form.
func (b Blob) String() string {
	return base64.StdEncoding.EncodeToString(b)
}
// MarshalYAML implements the yaml.Marshaler interface, encoding the blob as a
// standard base-64 string.
func (b Blob) MarshalYAML() (interface{}, error) {
	return base64.StdEncoding.EncodeToString([]byte(b)), nil
}
// UnmarshalYAML implements the yaml.Unmarshaler interface, decoding a
// standard base-64 string into the blob.
func (b *Blob) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var encoded string
	if err := unmarshal(&encoded); err != nil {
		return err
	}
	decoded, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		return err
	}
	*b = decoded
	return nil
}

View File

@ -1,55 +0,0 @@
package yamlutil
import (
"bytes"
"testing"
yaml "gopkg.in/yaml.v2"
)
// TestBlob asserts that Blob round-trips through yaml as a base-64 string.
func TestBlob(t *testing.T) {
	testCases := []struct {
		descr string
		in    Blob
		exp   string
	}{
		{
			descr: "empty",
			in:    Blob(""),
			exp:   `""`,
		},
		{
			descr: "zero",
			in:    Blob{0},
			exp:   "AA==",
		},
		{
			descr: "zeros",
			in:    Blob{0, 0, 0},
			exp:   "AAAA",
		},
		{
			descr: "foo",
			in:    Blob("foo"),
			exp:   "Zm9v",
		},
	}
	for _, test := range testCases {
		t.Run(test.descr, func(t *testing.T) {
			// yaml.Marshal appends a trailing newline to its output.
			out, err := yaml.Marshal(test.in)
			if err != nil {
				t.Fatalf("error marshaling %q: %v", test.in, err)
			} else if test.exp+"\n" != string(out) {
				t.Fatalf("marshal exp:%q got:%q", test.exp+"\n", out)
			}
			var blob Blob
			if err := yaml.Unmarshal(out, &blob); err != nil {
				t.Fatalf("error unmarshaling %q: %v", out, err)
			} else if !bytes.Equal([]byte(blob), []byte(test.in)) {
				t.Fatalf("unmarshal exp:%q got:%q", test.in, blob)
			}
		})
	}
}