Mirror of https://github.com/seaweedfs/seaweedfs.git (synced 2024-01-19 02:48:24 +00:00)
Commit 0a57db7026
@@ -22,6 +22,7 @@ type S3Options struct {
 	filer            *string
 	filerBucketsPath *string
 	port             *int
+	config           *string
 	domainName       *string
 	tlsPrivateKey    *string
 	tlsCertificate   *string
@@ -33,15 +34,63 @@ func init() {
 	s3StandaloneOptions.filerBucketsPath = cmdS3.Flag.String("filer.dir.buckets", "/buckets", "folder on filer to store all buckets")
 	s3StandaloneOptions.port = cmdS3.Flag.Int("port", 8333, "s3 server http listen port")
 	s3StandaloneOptions.domainName = cmdS3.Flag.String("domainName", "", "suffix of the host name, {bucket}.{domainName}")
+	s3StandaloneOptions.config = cmdS3.Flag.String("config", "", "path to the config file")
 	s3StandaloneOptions.tlsPrivateKey = cmdS3.Flag.String("key.file", "", "path to the TLS private key file")
 	s3StandaloneOptions.tlsCertificate = cmdS3.Flag.String("cert.file", "", "path to the TLS certificate file")
 }
 
 var cmdS3 = &Command{
-	UsageLine: "s3 -port=8333 -filer=<ip:port>",
+	UsageLine: "s3 [-port=8333] [-filer=<ip:port>] [-config=</path/to/config.json>]",
 	Short: "start a s3 API compatible server that is backed by a filer",
 	Long: `start a s3 API compatible server that is backed by a filer.
 
+	By default, you can use any access key and secret key to access the S3 APIs.
+	To enable credential based access, create a config.json file similar to this:
+
+	{
+	  "identities": [
+	    {
+	      "name": "some_name",
+	      "credentials": [
+	        {
+	          "accessKey": "some_access_key1",
+	          "secretKey": "some_secret_key1"
+	        }
+	      ],
+	      "actions": [
+	        "Admin",
+	        "Read",
+	        "Write"
+	      ]
+	    },
+	    {
+	      "name": "some_read_only_user",
+	      "credentials": [
+	        {
+	          "accessKey": "some_access_key2",
+	          "secretKey": "some_secret_key2"
+	        }
+	      ],
+	      "actions": [
+	        "Read"
+	      ]
+	    },
+	    {
+	      "name": "some_normal_user",
+	      "credentials": [
+	        {
+	          "accessKey": "some_access_key3",
+	          "secretKey": "some_secret_key3"
+	        }
+	      ],
+	      "actions": [
+	        "Read",
+	        "Write"
+	      ]
+	    }
+	  ]
+	}
+
 `,
 }
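The config.json layout above maps directly onto the iam_pb.S3ApiConfiguration message added later in this commit. Below is a minimal sketch (not part of the commit) of reading and validating such a file with jsonpb; the file path is illustrative:

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"

	"github.com/chrislusf/seaweedfs/weed/pb/iam_pb"
	"github.com/golang/protobuf/jsonpb"
)

func main() {
	// hypothetical path; any file following the layout shown above works
	rawData, err := ioutil.ReadFile("config.json")
	if err != nil {
		panic(err)
	}
	conf := &iam_pb.S3ApiConfiguration{}
	// jsonpb maps the camelCase JSON keys (accessKey, secretKey) onto the proto fields
	if err := jsonpb.Unmarshal(bytes.NewReader(rawData), conf); err != nil {
		panic(err)
	}
	for _, ident := range conf.Identities {
		fmt.Println(ident.Name, len(ident.Credentials), ident.Actions)
	}
}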
@@ -66,6 +115,7 @@ func (s3opt *S3Options) startS3Server() bool {
 	_, s3ApiServer_err := s3api.NewS3ApiServer(router, &s3api.S3ApiServerOption{
 		Filer:            *s3opt.filer,
 		FilerGrpcAddress: filerGrpcAddress,
+		Config:           *s3opt.config,
 		DomainName:       *s3opt.domainName,
 		BucketsPath:      *s3opt.filerBucketsPath,
 		GrpcDialOption:   security.LoadClientTLS(util.GetViper(), "grpc.client"),
@@ -97,6 +97,7 @@ func init() {
 	s3Options.domainName = cmdServer.Flag.String("s3.domainName", "", "suffix of the host name, {bucket}.{domainName}")
 	s3Options.tlsPrivateKey = cmdServer.Flag.String("s3.key.file", "", "path to the TLS private key file")
 	s3Options.tlsCertificate = cmdServer.Flag.String("s3.cert.file", "", "path to the TLS certificate file")
+	s3Options.config = cmdServer.Flag.String("s3.config", "", "path to the config file")
 
 }
@@ -6,5 +6,6 @@ gen:
 	protoc master.proto --go_out=plugins=grpc:./master_pb
 	protoc volume_server.proto --go_out=plugins=grpc:./volume_server_pb
 	protoc filer.proto --go_out=plugins=grpc:./filer_pb
+	protoc iam.proto --go_out=plugins=grpc:./iam_pb
 	# protoc filer.proto --java_out=../../other/java/client/src/main/java
 	cp filer.proto ../../other/java/client/src/main/proto
weed/pb/iam.proto (new file, 50 lines)
@@ -0,0 +1,50 @@

syntax = "proto3";

package iam_pb;

option java_package = "seaweedfs.client";
option java_outer_classname = "IamProto";

//////////////////////////////////////////////////

service SeaweedIdentityAccessManagement {

}

//////////////////////////////////////////////////

message S3ApiConfiguration {
	repeated Identity identities = 1;
}

message Identity {
	string name = 1;
	repeated Credential credentials = 2;
	repeated string actions = 3;
}

message Credential {
	string access_key = 1;
	string secret_key = 2;
	// uint64 expiration = 3;
	// bool is_disabled = 4;
}

/*
message Policy {
	repeated Statement statements = 1;
}

message Statement {
	repeated Action action = 1;
	repeated Resource resource = 2;
}

message Action {
	string action = 1;
}
message Resource {
	string bucket = 1;
	// string path = 2;
}
*/
weed/pb/iam_pb/iam.pb.go (new file, 174 lines)
@@ -0,0 +1,174 @@

// Code generated by protoc-gen-go.
// source: iam.proto
// DO NOT EDIT!

/*
Package iam_pb is a generated protocol buffer package.

It is generated from these files:
	iam.proto

It has these top-level messages:
	Identities
	Identity
	Credential
*/
package iam_pb

import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"

import (
	context "golang.org/x/net/context"
	grpc "google.golang.org/grpc"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package

type S3ApiConfiguration struct {
	Identities []*Identity `protobuf:"bytes,1,rep,name=identities" json:"identities,omitempty"`
}

func (m *S3ApiConfiguration) Reset()                    { *m = S3ApiConfiguration{} }
func (m *S3ApiConfiguration) String() string            { return proto.CompactTextString(m) }
func (*S3ApiConfiguration) ProtoMessage()               {}
func (*S3ApiConfiguration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }

func (m *S3ApiConfiguration) GetIdentities() []*Identity {
	if m != nil {
		return m.Identities
	}
	return nil
}

type Identity struct {
	Name        string        `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
	Credentials []*Credential `protobuf:"bytes,2,rep,name=credentials" json:"credentials,omitempty"`
	Actions     []string      `protobuf:"bytes,3,rep,name=actions" json:"actions,omitempty"`
}

func (m *Identity) Reset()                    { *m = Identity{} }
func (m *Identity) String() string            { return proto.CompactTextString(m) }
func (*Identity) ProtoMessage()               {}
func (*Identity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }

func (m *Identity) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *Identity) GetCredentials() []*Credential {
	if m != nil {
		return m.Credentials
	}
	return nil
}

func (m *Identity) GetActions() []string {
	if m != nil {
		return m.Actions
	}
	return nil
}

type Credential struct {
	AccessKey string `protobuf:"bytes,1,opt,name=access_key,json=accessKey" json:"access_key,omitempty"`
	SecretKey string `protobuf:"bytes,2,opt,name=secret_key,json=secretKey" json:"secret_key,omitempty"`
}

func (m *Credential) Reset()                    { *m = Credential{} }
func (m *Credential) String() string            { return proto.CompactTextString(m) }
func (*Credential) ProtoMessage()               {}
func (*Credential) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }

func (m *Credential) GetAccessKey() string {
	if m != nil {
		return m.AccessKey
	}
	return ""
}

func (m *Credential) GetSecretKey() string {
	if m != nil {
		return m.SecretKey
	}
	return ""
}

func init() {
	proto.RegisterType((*S3ApiConfiguration)(nil), "iam_pb.Identities")
	proto.RegisterType((*Identity)(nil), "iam_pb.Identity")
	proto.RegisterType((*Credential)(nil), "iam_pb.Credential")
}

// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// Client API for SeaweedIdentityAccessManagement service

type SeaweedIdentityAccessManagementClient interface {
}

type seaweedIdentityAccessManagementClient struct {
	cc *grpc.ClientConn
}

func NewSeaweedIdentityAccessManagementClient(cc *grpc.ClientConn) SeaweedIdentityAccessManagementClient {
	return &seaweedIdentityAccessManagementClient{cc}
}

// Server API for SeaweedIdentityAccessManagement service

type SeaweedIdentityAccessManagementServer interface {
}

func RegisterSeaweedIdentityAccessManagementServer(s *grpc.Server, srv SeaweedIdentityAccessManagementServer) {
	s.RegisterService(&_SeaweedIdentityAccessManagement_serviceDesc, srv)
}

var _SeaweedIdentityAccessManagement_serviceDesc = grpc.ServiceDesc{
	ServiceName: "iam_pb.SeaweedIdentityAccessManagement",
	HandlerType: (*SeaweedIdentityAccessManagementServer)(nil),
	Methods:     []grpc.MethodDesc{},
	Streams:     []grpc.StreamDesc{},
	Metadata:    "iam.proto",
}

func init() { proto.RegisterFile("iam.proto", fileDescriptor0) }

var fileDescriptor0 = []byte{
	// 250 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x4c, 0x90, 0x41, 0x4b, 0xc3, 0x40,
	0x10, 0x85, 0x69, 0x23, 0xb5, 0x99, 0x5e, 0xca, 0x9c, 0xf6, 0xa0, 0x18, 0x73, 0xca, 0x29, 0x48,
	0xeb, 0x1f, 0xa8, 0x05, 0xa1, 0x16, 0x41, 0xd2, 0x1f, 0x50, 0xa6, 0xdb, 0x69, 0x19, 0xec, 0x6e,
	0x42, 0x76, 0x45, 0xf2, 0xef, 0x25, 0xbb, 0x46, 0x7b, 0xdb, 0x7d, 0xdf, 0x7b, 0xb3, 0x3b, 0x0f,
	0x52, 0x21, 0x53, 0x36, 0x6d, 0xed, 0x6b, 0x9c, 0x08, 0x99, 0x7d, 0x73, 0xc8, 0x5f, 0x01, 0x77,
	0xcb, 0x55, 0x23, 0xeb, 0xda, 0x9e, 0xe4, 0xfc, 0xd5, 0x92, 0x97, 0xda, 0xe2, 0x13, 0x80, 0x1c,
	0xd9, 0x7a, 0xf1, 0xc2, 0x4e, 0x8d, 0xb2, 0xa4, 0x98, 0x2d, 0xe6, 0x65, 0x8c, 0x94, 0x9b, 0x48,
	0xba, 0xea, 0xca, 0x93, 0x5b, 0x98, 0x0e, 0x3a, 0x22, 0xdc, 0x58, 0x32, 0xac, 0x46, 0xd9, 0xa8,
	0x48, 0xab, 0x70, 0xc6, 0x67, 0x98, 0xe9, 0x96, 0x83, 0x83, 0x2e, 0x4e, 0x8d, 0xc3, 0x48, 0x1c,
	0x46, 0xae, 0xff, 0x50, 0x75, 0x6d, 0x43, 0x05, 0xb7, 0xa4, 0xfb, 0x1f, 0x39, 0x95, 0x64, 0x49,
	0x91, 0x56, 0xc3, 0x35, 0x7f, 0x03, 0xf8, 0x0f, 0xe1, 0x3d, 0x00, 0x69, 0xcd, 0xce, 0xed, 0x3f,
	0xb9, 0xfb, 0x7d, 0x37, 0x8d, 0xca, 0x96, 0xbb, 0x1e, 0x3b, 0xd6, 0x2d, 0xfb, 0x80, 0xc7, 0x11,
	0x47, 0x65, 0xcb, 0xdd, 0xe2, 0x11, 0x1e, 0x76, 0x4c, 0xdf, 0xcc, 0xc7, 0x61, 0x85, 0x55, 0x88,
	0xbe, 0x93, 0xa5, 0x33, 0x1b, 0xb6, 0xfe, 0xe5, 0x0e, 0xe6, 0x2e, 0x5a, 0x4e, 0xae, 0xd4, 0x17,
	0xe9, 0xb5, 0xe9, 0x86, 0xcc, 0x47, 0x5f, 0xe6, 0x61, 0x12, 0x3a, 0x5d, 0xfe, 0x04, 0x00, 0x00,
	0xff, 0xff, 0x83, 0x4f, 0x61, 0x03, 0x60, 0x01, 0x00, 0x00,
}
weed/s3api/auth_credentials.go (new file, 162 lines)
@@ -0,0 +1,162 @@

package s3api

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"

	"github.com/golang/protobuf/jsonpb"

	"github.com/chrislusf/seaweedfs/weed/glog"
	"github.com/chrislusf/seaweedfs/weed/pb/iam_pb"
)

type Action string

const (
	ACTION_READ  = "Read"
	ACTION_WRITE = "Write"
	ACTION_ADMIN = "Admin"
)

type Iam interface {
	Check(f http.HandlerFunc, actions ...Action) http.HandlerFunc
}

type IdentityAccessManagement struct {
	identities []*Identity
	domain     string
}

type Identity struct {
	Name        string
	Credentials []*Credential
	Actions     []Action
}

type Credential struct {
	AccessKey string
	SecretKey string
}

func NewIdentityAccessManagement(fileName string, domain string) *IdentityAccessManagement {
	iam := &IdentityAccessManagement{
		domain: domain,
	}
	if fileName == "" {
		return iam
	}
	if err := iam.loadS3ApiConfiguration(fileName); err != nil {
		glog.Fatalf("fail to load config file %s: %v", fileName, err)
	}
	return iam
}

func (iam *IdentityAccessManagement) loadS3ApiConfiguration(fileName string) error {

	s3ApiConfiguration := &iam_pb.S3ApiConfiguration{}

	rawData, readErr := ioutil.ReadFile(fileName)
	if readErr != nil {
		glog.Warningf("fail to read %s : %v", fileName, readErr)
		return fmt.Errorf("fail to read %s : %v", fileName, readErr)
	}

	glog.V(1).Infof("maybeLoadVolumeInfo Unmarshal volume info %v", fileName)
	if err := jsonpb.Unmarshal(bytes.NewReader(rawData), s3ApiConfiguration); err != nil {
		glog.Warningf("unmarshal error: %v", err)
		return fmt.Errorf("unmarshal %s error: %v", fileName, err)
	}

	for _, ident := range s3ApiConfiguration.Identities {
		t := &Identity{
			Name:        ident.Name,
			Credentials: nil,
			Actions:     nil,
		}
		for _, action := range ident.Actions {
			t.Actions = append(t.Actions, Action(action))
		}
		for _, cred := range ident.Credentials {
			t.Credentials = append(t.Credentials, &Credential{
				AccessKey: cred.AccessKey,
				SecretKey: cred.SecretKey,
			})
		}
		iam.identities = append(iam.identities, t)
	}

	return nil
}

func (iam *IdentityAccessManagement) lookupByAccessKey(accessKey string) (identity *Identity, cred *Credential, found bool) {
	for _, ident := range iam.identities {
		for _, cred := range ident.Credentials {
			if cred.AccessKey == accessKey {
				return ident, cred, true
			}
		}
	}
	return nil, nil, false
}

func (iam *IdentityAccessManagement) Auth(f http.HandlerFunc, actions ...Action) http.HandlerFunc {

	if len(iam.identities) == 0 {
		return f
	}

	return func(w http.ResponseWriter, r *http.Request) {
		errCode := iam.authRequest(r, actions)
		if errCode == ErrNone {
			f(w, r)
			return
		}
		writeErrorResponse(w, errCode, r.URL)
	}
}

// check whether the request has valid access keys
func (iam *IdentityAccessManagement) authRequest(r *http.Request, actions []Action) ErrorCode {
	var identity *Identity
	var s3Err ErrorCode
	switch getRequestAuthType(r) {
	case authTypeStreamingSigned:
		return ErrNone
	case authTypeUnknown:
		glog.V(3).Infof("unknown auth type")
		return ErrAccessDenied
	case authTypePresignedV2, authTypeSignedV2:
		glog.V(3).Infof("v2 auth type")
		identity, s3Err = iam.isReqAuthenticatedV2(r)
	case authTypeSigned, authTypePresigned:
		glog.V(3).Infof("v4 auth type")
		identity, s3Err = iam.reqSignatureV4Verify(r)
	}

	glog.V(3).Infof("auth error: %v", s3Err)
	if s3Err != ErrNone {
		return s3Err
	}

	glog.V(3).Infof("user name: %v actions: %v", identity.Name, identity.Actions)

	if !identity.canDo(actions) {
		return ErrAccessDenied
	}

	return ErrNone

}

func (identity *Identity) canDo(actions []Action) bool {
	for _, a := range identity.Actions {
		for _, b := range actions {
			if a == b {
				return true
			}
		}
	}
	return false
}
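A short usage sketch of the new Auth wrapper, assuming it runs inside the weed/s3api package; the config path, route pattern, and handler are illustrative and not part of this commit:

// illustrative only: wrap an object handler so that requests must carry
// a valid access key with the Write action
iam := NewIdentityAccessManagement("/etc/seaweedfs/config.json", "") // hypothetical path

putHandler := func(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
}

// requests that fail the check receive an S3-style error response
http.HandleFunc("/mybucket/myobject", iam.Auth(putHandler, ACTION_WRITE))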
weed/s3api/auth_credentials_test.go (new file, 68 lines)
@@ -0,0 +1,68 @@

package s3api

import (
	"testing"

	"github.com/golang/protobuf/jsonpb"

	"github.com/chrislusf/seaweedfs/weed/pb/iam_pb"
)

func TestIdentityListFileFormat(t *testing.T) {

	s3ApiConfiguration := &iam_pb.S3ApiConfiguration{}

	identity1 := &iam_pb.Identity{
		Name: "some_name",
		Credentials: []*iam_pb.Credential{
			{
				AccessKey: "some_access_key1",
				SecretKey: "some_secret_key2",
			},
		},
		Actions: []string{
			ACTION_ADMIN,
			ACTION_READ,
			ACTION_WRITE,
		},
	}
	identity2 := &iam_pb.Identity{
		Name: "some_read_only_user",
		Credentials: []*iam_pb.Credential{
			{
				AccessKey: "some_access_key1",
				SecretKey: "some_secret_key1",
			},
		},
		Actions: []string{
			ACTION_READ,
		},
	}
	identity3 := &iam_pb.Identity{
		Name: "some_normal_user",
		Credentials: []*iam_pb.Credential{
			{
				AccessKey: "some_access_key2",
				SecretKey: "some_secret_key2",
			},
		},
		Actions: []string{
			ACTION_READ,
			ACTION_WRITE,
		},
	}

	s3ApiConfiguration.Identities = append(s3ApiConfiguration.Identities, identity1)
	s3ApiConfiguration.Identities = append(s3ApiConfiguration.Identities, identity2)
	s3ApiConfiguration.Identities = append(s3ApiConfiguration.Identities, identity3)

	m := jsonpb.Marshaler{
		EmitDefaults: true,
		Indent:       "  ",
	}

	text, _ := m.MarshalToString(s3ApiConfiguration)

	println(text)

}
weed/s3api/auth_signature_v2.go (new file, 412 lines)
@@ -0,0 +1,412 @@

/*
 * The following code tries to reverse engineer the Amazon S3 APIs,
 * and is mostly copied from minio implementation.
 */

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.

package s3api

import (
	"crypto/hmac"
	"crypto/sha1"
	"crypto/subtle"
	"encoding/base64"
	"fmt"
	"net"
	"net/http"
	"net/url"
	"path"
	"sort"
	"strconv"
	"strings"
	"time"
)

// Whitelist resource list that will be used in query string for signature-V2 calculation.
// The list should be alphabetically sorted
var resourceList = []string{
	"acl",
	"delete",
	"lifecycle",
	"location",
	"logging",
	"notification",
	"partNumber",
	"policy",
	"requestPayment",
	"response-cache-control",
	"response-content-disposition",
	"response-content-encoding",
	"response-content-language",
	"response-content-type",
	"response-expires",
	"torrent",
	"uploadId",
	"uploads",
	"versionId",
	"versioning",
	"versions",
	"website",
}

// Verify if request has valid AWS Signature Version '2'.
func (iam *IdentityAccessManagement) isReqAuthenticatedV2(r *http.Request) (*Identity, ErrorCode) {
	if isRequestSignatureV2(r) {
		return iam.doesSignV2Match(r)
	}
	return iam.doesPresignV2SignatureMatch(r)
}

// Authorization = "AWS" + " " + AWSAccessKeyId + ":" + Signature;
// Signature = Base64( HMAC-SHA1( YourSecretKey, UTF-8-Encoding-Of( StringToSign ) ) );
//
// StringToSign = HTTP-Verb + "\n" +
// 	Content-Md5 + "\n" +
// 	Content-Type + "\n" +
// 	Date + "\n" +
// 	CanonicalizedProtocolHeaders +
// 	CanonicalizedResource;
//
// CanonicalizedResource = [ "/" + Bucket ] +
// 	<HTTP-Request-URI, from the protocol name up to the query string> +
// 	[ subresource, if present. For example "?acl", "?location", "?logging", or "?torrent"];
//
// CanonicalizedProtocolHeaders = <described below>

// doesSignV2Match - Verify authorization header with calculated header in accordance with
//     - http://docs.aws.amazon.com/AmazonS3/latest/dev/auth-request-sig-v2.html
// returns true if matches, false otherwise. if error is not nil then it is always false

func validateV2AuthHeader(v2Auth string) (accessKey string, errCode ErrorCode) {
	if v2Auth == "" {
		return "", ErrAuthHeaderEmpty
	}
	// Verify if the header algorithm is supported or not.
	if !strings.HasPrefix(v2Auth, signV2Algorithm) {
		return "", ErrSignatureVersionNotSupported
	}

	// below is V2 Signed Auth header format, splitting on `space` (after the `AWS` string).
	// Authorization = "AWS" + " " + AWSAccessKeyId + ":" + Signature
	authFields := strings.Split(v2Auth, " ")
	if len(authFields) != 2 {
		return "", ErrMissingFields
	}

	// Then will be splitting on ":", this will seprate `AWSAccessKeyId` and `Signature` string.
	keySignFields := strings.Split(strings.TrimSpace(authFields[1]), ":")
	if len(keySignFields) != 2 {
		return "", ErrMissingFields
	}

	return keySignFields[0], ErrNone
}

func (iam *IdentityAccessManagement) doesSignV2Match(r *http.Request) (*Identity, ErrorCode) {
	v2Auth := r.Header.Get("Authorization")

	accessKey, apiError := validateV2AuthHeader(v2Auth)
	if apiError != ErrNone {
		return nil, apiError
	}

	// Access credentials.
	// Validate if access key id same.
	ident, cred, found := iam.lookupByAccessKey(accessKey)
	if !found {
		return nil, ErrInvalidAccessKeyID
	}

	// r.RequestURI will have raw encoded URI as sent by the client.
	tokens := strings.SplitN(r.RequestURI, "?", 2)
	encodedResource := tokens[0]
	encodedQuery := ""
	if len(tokens) == 2 {
		encodedQuery = tokens[1]
	}

	unescapedQueries, err := unescapeQueries(encodedQuery)
	if err != nil {
		return nil, ErrInvalidQueryParams
	}

	encodedResource, err = getResource(encodedResource, r.Host, iam.domain)
	if err != nil {
		return nil, ErrInvalidRequest
	}

	prefix := fmt.Sprintf("%s %s:", signV2Algorithm, cred.AccessKey)
	if !strings.HasPrefix(v2Auth, prefix) {
		return nil, ErrSignatureDoesNotMatch
	}
	v2Auth = v2Auth[len(prefix):]
	expectedAuth := signatureV2(cred, r.Method, encodedResource, strings.Join(unescapedQueries, "&"), r.Header)
	if !compareSignatureV2(v2Auth, expectedAuth) {
		return nil, ErrSignatureDoesNotMatch
	}
	return ident, ErrNone
}

// doesPresignV2SignatureMatch - Verify query headers with presigned signature
//     - http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html#RESTAuthenticationQueryStringAuth
// returns ErrNone if matches. S3 errors otherwise.
func (iam *IdentityAccessManagement) doesPresignV2SignatureMatch(r *http.Request) (*Identity, ErrorCode) {

	// r.RequestURI will have raw encoded URI as sent by the client.
	tokens := strings.SplitN(r.RequestURI, "?", 2)
	encodedResource := tokens[0]
	encodedQuery := ""
	if len(tokens) == 2 {
		encodedQuery = tokens[1]
	}

	var (
		filteredQueries []string
		gotSignature    string
		expires         string
		accessKey       string
		err             error
	)

	var unescapedQueries []string
	unescapedQueries, err = unescapeQueries(encodedQuery)
	if err != nil {
		return nil, ErrInvalidQueryParams
	}

	// Extract the necessary values from presigned query, construct a list of new filtered queries.
	for _, query := range unescapedQueries {
		keyval := strings.SplitN(query, "=", 2)
		if len(keyval) != 2 {
			return nil, ErrInvalidQueryParams
		}
		switch keyval[0] {
		case "AWSAccessKeyId":
			accessKey = keyval[1]
		case "Signature":
			gotSignature = keyval[1]
		case "Expires":
			expires = keyval[1]
		default:
			filteredQueries = append(filteredQueries, query)
		}
	}

	// Invalid values returns error.
	if accessKey == "" || gotSignature == "" || expires == "" {
		return nil, ErrInvalidQueryParams
	}

	// Validate if access key id same.
	ident, cred, found := iam.lookupByAccessKey(accessKey)
	if !found {
		return nil, ErrInvalidAccessKeyID
	}

	// Make sure the request has not expired.
	expiresInt, err := strconv.ParseInt(expires, 10, 64)
	if err != nil {
		return nil, ErrMalformedExpires
	}

	// Check if the presigned URL has expired.
	if expiresInt < time.Now().UTC().Unix() {
		return nil, ErrExpiredPresignRequest
	}

	encodedResource, err = getResource(encodedResource, r.Host, iam.domain)
	if err != nil {
		return nil, ErrInvalidRequest
	}

	expectedSignature := preSignatureV2(cred, r.Method, encodedResource, strings.Join(filteredQueries, "&"), r.Header, expires)
	if !compareSignatureV2(gotSignature, expectedSignature) {
		return nil, ErrSignatureDoesNotMatch
	}

	return ident, ErrNone
}

// Escape encodedQuery string into unescaped list of query params, returns error
// if any while unescaping the values.
func unescapeQueries(encodedQuery string) (unescapedQueries []string, err error) {
	for _, query := range strings.Split(encodedQuery, "&") {
		var unescapedQuery string
		unescapedQuery, err = url.QueryUnescape(query)
		if err != nil {
			return nil, err
		}
		unescapedQueries = append(unescapedQueries, unescapedQuery)
	}
	return unescapedQueries, nil
}

// Returns "/bucketName/objectName" for path-style or virtual-host-style requests.
func getResource(path string, host string, domain string) (string, error) {
	if domain == "" {
		return path, nil
	}
	// If virtual-host-style is enabled construct the "resource" properly.
	if strings.Contains(host, ":") {
		// In bucket.mydomain.com:9000, strip out :9000
		var err error
		if host, _, err = net.SplitHostPort(host); err != nil {
			return "", err
		}
	}
	if !strings.HasSuffix(host, "."+domain) {
		return path, nil
	}
	bucket := strings.TrimSuffix(host, "."+domain)
	return "/" + pathJoin(bucket, path), nil
}

// pathJoin - like path.Join() but retains trailing "/" of the last element
func pathJoin(elem ...string) string {
	trailingSlash := ""
	if len(elem) > 0 {
		if strings.HasSuffix(elem[len(elem)-1], "/") {
			trailingSlash = "/"
		}
	}
	return path.Join(elem...) + trailingSlash
}

// Return the signature v2 of a given request.
func signatureV2(cred *Credential, method string, encodedResource string, encodedQuery string, headers http.Header) string {
	stringToSign := getStringToSignV2(method, encodedResource, encodedQuery, headers, "")
	signature := calculateSignatureV2(stringToSign, cred.SecretKey)
	return signature
}

// Return string to sign under two different conditions.
// - if expires string is set then string to sign includes date instead of the Date header.
// - if expires string is empty then string to sign includes date header instead.
func getStringToSignV2(method string, encodedResource, encodedQuery string, headers http.Header, expires string) string {
	canonicalHeaders := canonicalizedAmzHeadersV2(headers)
	if len(canonicalHeaders) > 0 {
		canonicalHeaders += "\n"
	}

	date := expires // Date is set to expires date for presign operations.
	if date == "" {
		// If expires date is empty then request header Date is used.
		date = headers.Get("Date")
	}

	// From the Amazon docs:
	//
	// StringToSign = HTTP-Verb + "\n" +
	// 	Content-Md5 + "\n" +
	// 	Content-Type + "\n" +
	// 	Date/Expires + "\n" +
	// 	CanonicalizedProtocolHeaders +
	// 	CanonicalizedResource;
	stringToSign := strings.Join([]string{
		method,
		headers.Get("Content-MD5"),
		headers.Get("Content-Type"),
		date,
		canonicalHeaders,
	}, "\n")

	return stringToSign + canonicalizedResourceV2(encodedResource, encodedQuery)
}

// Return canonical resource string.
func canonicalizedResourceV2(encodedResource, encodedQuery string) string {
	queries := strings.Split(encodedQuery, "&")
	keyval := make(map[string]string)
	for _, query := range queries {
		key := query
		val := ""
		index := strings.Index(query, "=")
		if index != -1 {
			key = query[:index]
			val = query[index+1:]
		}
		keyval[key] = val
	}

	var canonicalQueries []string
	for _, key := range resourceList {
		val, ok := keyval[key]
		if !ok {
			continue
		}
		if val == "" {
			canonicalQueries = append(canonicalQueries, key)
			continue
		}
		canonicalQueries = append(canonicalQueries, key+"="+val)
	}

	// The queries will be already sorted as resourceList is sorted, if canonicalQueries
	// is empty strings.Join returns empty.
	canonicalQuery := strings.Join(canonicalQueries, "&")
	if canonicalQuery != "" {
		return encodedResource + "?" + canonicalQuery
	}
	return encodedResource
}

// Return canonical headers.
func canonicalizedAmzHeadersV2(headers http.Header) string {
	var keys []string
	keyval := make(map[string]string)
	for key := range headers {
		lkey := strings.ToLower(key)
		if !strings.HasPrefix(lkey, "x-amz-") {
			continue
		}
		keys = append(keys, lkey)
		keyval[lkey] = strings.Join(headers[key], ",")
	}
	sort.Strings(keys)
	var canonicalHeaders []string
	for _, key := range keys {
		canonicalHeaders = append(canonicalHeaders, key+":"+keyval[key])
	}
	return strings.Join(canonicalHeaders, "\n")
}

func calculateSignatureV2(stringToSign string, secret string) string {
	hm := hmac.New(sha1.New, []byte(secret))
	hm.Write([]byte(stringToSign))
	return base64.StdEncoding.EncodeToString(hm.Sum(nil))
}

// compareSignatureV2 returns true if and only if both signatures
// are equal. The signatures are expected to be base64 encoded strings
// according to the AWS S3 signature V2 spec.
func compareSignatureV2(sig1, sig2 string) bool {
	// Decode signature string to binary byte-sequence representation is required
	// as Base64 encoding of a value is not unique:
	// For example "aGVsbG8=" and "aGVsbG8=\r" will result in the same byte slice.
	signature1, err := base64.StdEncoding.DecodeString(sig1)
	if err != nil {
		return false
	}
	signature2, err := base64.StdEncoding.DecodeString(sig2)
	if err != nil {
		return false
	}
	return subtle.ConstantTimeCompare(signature1, signature2) == 1
}

// Return signature-v2 for the presigned request.
func preSignatureV2(cred *Credential, method string, encodedResource string, encodedQuery string, headers http.Header, expires string) string {
	stringToSign := getStringToSignV2(method, encodedResource, encodedQuery, headers, expires)
	return calculateSignatureV2(stringToSign, cred.SecretKey)
}
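For reference, a hedged client-side sketch of building a V2 Authorization header the way doesSignV2Match above expects to verify it; it assumes the weed/s3api package scope, the credential values are made up, and the "AWS " prefix is assumed to match signV2Algorithm defined elsewhere in the package:

// illustrative only: sign an outgoing request with the same helpers used by
// the server-side verification above
cred := &Credential{AccessKey: "some_access_key1", SecretKey: "some_secret_key1"}

req, _ := http.NewRequest("GET", "http://localhost:8333/mybucket/myobject?acl", nil)
req.Header.Set("Date", time.Now().UTC().Format(http.TimeFormat))

// same string-to-sign construction as getStringToSignV2
signature := signatureV2(cred, req.Method, "/mybucket/myobject", "acl", req.Header)
req.Header.Set("Authorization", "AWS "+cred.AccessKey+":"+signature)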
720
weed/s3api/auth_signature_v4.go
Normal file
720
weed/s3api/auth_signature_v4.go
Normal file
|
@ -0,0 +1,720 @@
|
||||||
|
/*
|
||||||
|
* The following code tries to reverse engineer the Amazon S3 APIs,
|
||||||
|
* and is mostly copied from minio implementation.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
// implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
package s3api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/hmac"
|
||||||
|
"crypto/sha256"
|
||||||
|
"crypto/subtle"
|
||||||
|
"encoding/hex"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
"unicode/utf8"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (iam *IdentityAccessManagement) reqSignatureV4Verify(r *http.Request) (*Identity, ErrorCode) {
|
||||||
|
sha256sum := getContentSha256Cksum(r)
|
||||||
|
switch {
|
||||||
|
case isRequestSignatureV4(r):
|
||||||
|
return iam.doesSignatureMatch(sha256sum, r)
|
||||||
|
case isRequestPresignedSignatureV4(r):
|
||||||
|
return iam.doesPresignedSignatureMatch(sha256sum, r)
|
||||||
|
}
|
||||||
|
return nil, ErrAccessDenied
|
||||||
|
}
|
||||||
|
|
||||||
|
// Streaming AWS Signature Version '4' constants.
|
||||||
|
const (
|
||||||
|
emptySHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||||
|
streamingContentSHA256 = "STREAMING-AWS4-HMAC-SHA256-PAYLOAD"
|
||||||
|
signV4ChunkedAlgorithm = "AWS4-HMAC-SHA256-PAYLOAD"
|
||||||
|
|
||||||
|
// http Header "x-amz-content-sha256" == "UNSIGNED-PAYLOAD" indicates that the
|
||||||
|
// client did not calculate sha256 of the payload.
|
||||||
|
unsignedPayload = "UNSIGNED-PAYLOAD"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Returns SHA256 for calculating canonical-request.
|
||||||
|
func getContentSha256Cksum(r *http.Request) string {
|
||||||
|
var (
|
||||||
|
defaultSha256Cksum string
|
||||||
|
v []string
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
// For a presigned request we look at the query param for sha256.
|
||||||
|
if isRequestPresignedSignatureV4(r) {
|
||||||
|
// X-Amz-Content-Sha256, if not set in presigned requests, checksum
|
||||||
|
// will default to 'UNSIGNED-PAYLOAD'.
|
||||||
|
defaultSha256Cksum = unsignedPayload
|
||||||
|
v, ok = r.URL.Query()["X-Amz-Content-Sha256"]
|
||||||
|
if !ok {
|
||||||
|
v, ok = r.Header["X-Amz-Content-Sha256"]
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// X-Amz-Content-Sha256, if not set in signed requests, checksum
|
||||||
|
// will default to sha256([]byte("")).
|
||||||
|
defaultSha256Cksum = emptySHA256
|
||||||
|
v, ok = r.Header["X-Amz-Content-Sha256"]
|
||||||
|
}
|
||||||
|
|
||||||
|
// We found 'X-Amz-Content-Sha256' return the captured value.
|
||||||
|
if ok {
|
||||||
|
return v[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
// We couldn't find 'X-Amz-Content-Sha256'.
|
||||||
|
return defaultSha256Cksum
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify authorization header - http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html
|
||||||
|
func (iam *IdentityAccessManagement) doesSignatureMatch(hashedPayload string, r *http.Request) (*Identity, ErrorCode) {
|
||||||
|
|
||||||
|
// Copy request.
|
||||||
|
req := *r
|
||||||
|
|
||||||
|
// Save authorization header.
|
||||||
|
v4Auth := req.Header.Get("Authorization")
|
||||||
|
|
||||||
|
// Parse signature version '4' header.
|
||||||
|
signV4Values, err := parseSignV4(v4Auth)
|
||||||
|
if err != ErrNone {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract all the signed headers along with its values.
|
||||||
|
extractedSignedHeaders, errCode := extractSignedHeaders(signV4Values.SignedHeaders, r)
|
||||||
|
if errCode != ErrNone {
|
||||||
|
return nil, errCode
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify if the access key id matches.
|
||||||
|
identity, cred, found := iam.lookupByAccessKey(signV4Values.Credential.accessKey)
|
||||||
|
if !found {
|
||||||
|
return nil, ErrInvalidAccessKeyID
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract date, if not present throw error.
|
||||||
|
var date string
|
||||||
|
if date = req.Header.Get(http.CanonicalHeaderKey("X-Amz-Date")); date == "" {
|
||||||
|
if date = r.Header.Get("Date"); date == "" {
|
||||||
|
return nil, ErrMissingDateHeader
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Parse date header.
|
||||||
|
t, e := time.Parse(iso8601Format, date)
|
||||||
|
if e != nil {
|
||||||
|
return nil, ErrMalformedDate
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query string.
|
||||||
|
queryStr := req.URL.Query().Encode()
|
||||||
|
|
||||||
|
// Get canonical request.
|
||||||
|
canonicalRequest := getCanonicalRequest(extractedSignedHeaders, hashedPayload, queryStr, req.URL.Path, req.Method)
|
||||||
|
|
||||||
|
// Get string to sign from canonical request.
|
||||||
|
stringToSign := getStringToSign(canonicalRequest, t, signV4Values.Credential.getScope())
|
||||||
|
|
||||||
|
// Get hmac signing key.
|
||||||
|
signingKey := getSigningKey(cred.SecretKey, signV4Values.Credential.scope.date, signV4Values.Credential.scope.region)
|
||||||
|
|
||||||
|
// Calculate signature.
|
||||||
|
newSignature := getSignature(signingKey, stringToSign)
|
||||||
|
|
||||||
|
// Verify if signature match.
|
||||||
|
if !compareSignatureV4(newSignature, signV4Values.Signature) {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return error none.
|
||||||
|
return identity, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// credentialHeader data type represents structured form of Credential
|
||||||
|
// string from authorization header.
|
||||||
|
type credentialHeader struct {
|
||||||
|
accessKey string
|
||||||
|
scope struct {
|
||||||
|
date time.Time
|
||||||
|
region string
|
||||||
|
service string
|
||||||
|
request string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// signValues data type represents structured form of AWS Signature V4 header.
|
||||||
|
type signValues struct {
|
||||||
|
Credential credentialHeader
|
||||||
|
SignedHeaders []string
|
||||||
|
Signature string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return scope string.
|
||||||
|
func (c credentialHeader) getScope() string {
|
||||||
|
return strings.Join([]string{
|
||||||
|
c.scope.date.Format(yyyymmdd),
|
||||||
|
c.scope.region,
|
||||||
|
c.scope.service,
|
||||||
|
c.scope.request,
|
||||||
|
}, "/")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authorization: algorithm Credential=accessKeyID/credScope, \
|
||||||
|
// SignedHeaders=signedHeaders, Signature=signature
|
||||||
|
//
|
||||||
|
func parseSignV4(v4Auth string) (sv signValues, aec ErrorCode) {
|
||||||
|
// Replace all spaced strings, some clients can send spaced
|
||||||
|
// parameters and some won't. So we pro-actively remove any spaces
|
||||||
|
// to make parsing easier.
|
||||||
|
v4Auth = strings.Replace(v4Auth, " ", "", -1)
|
||||||
|
if v4Auth == "" {
|
||||||
|
return sv, ErrAuthHeaderEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify if the header algorithm is supported or not.
|
||||||
|
if !strings.HasPrefix(v4Auth, signV4Algorithm) {
|
||||||
|
return sv, ErrSignatureVersionNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strip off the Algorithm prefix.
|
||||||
|
v4Auth = strings.TrimPrefix(v4Auth, signV4Algorithm)
|
||||||
|
authFields := strings.Split(strings.TrimSpace(v4Auth), ",")
|
||||||
|
if len(authFields) != 3 {
|
||||||
|
return sv, ErrMissingFields
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize signature version '4' structured header.
|
||||||
|
signV4Values := signValues{}
|
||||||
|
|
||||||
|
var err ErrorCode
|
||||||
|
// Save credentail values.
|
||||||
|
signV4Values.Credential, err = parseCredentialHeader(authFields[0])
|
||||||
|
if err != ErrNone {
|
||||||
|
return sv, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save signed headers.
|
||||||
|
signV4Values.SignedHeaders, err = parseSignedHeader(authFields[1])
|
||||||
|
if err != ErrNone {
|
||||||
|
return sv, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save signature.
|
||||||
|
signV4Values.Signature, err = parseSignature(authFields[2])
|
||||||
|
if err != ErrNone {
|
||||||
|
return sv, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the structure here.
|
||||||
|
return signV4Values, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// parse credentialHeader string into its structured form.
|
||||||
|
func parseCredentialHeader(credElement string) (ch credentialHeader, aec ErrorCode) {
|
||||||
|
creds := strings.Split(strings.TrimSpace(credElement), "=")
|
||||||
|
if len(creds) != 2 {
|
||||||
|
return ch, ErrMissingFields
|
||||||
|
}
|
||||||
|
if creds[0] != "Credential" {
|
||||||
|
return ch, ErrMissingCredTag
|
||||||
|
}
|
||||||
|
credElements := strings.Split(strings.TrimSpace(creds[1]), "/")
|
||||||
|
if len(credElements) != 5 {
|
||||||
|
return ch, ErrCredMalformed
|
||||||
|
}
|
||||||
|
// Save access key id.
|
||||||
|
cred := credentialHeader{
|
||||||
|
accessKey: credElements[0],
|
||||||
|
}
|
||||||
|
var e error
|
||||||
|
cred.scope.date, e = time.Parse(yyyymmdd, credElements[1])
|
||||||
|
if e != nil {
|
||||||
|
return ch, ErrMalformedCredentialDate
|
||||||
|
}
|
||||||
|
|
||||||
|
cred.scope.region = credElements[2]
|
||||||
|
cred.scope.service = credElements[3] // "s3"
|
||||||
|
cred.scope.request = credElements[4] // "aws4_request"
|
||||||
|
return cred, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse slice of signed headers from signed headers tag.
|
||||||
|
func parseSignedHeader(signedHdrElement string) ([]string, ErrorCode) {
|
||||||
|
signedHdrFields := strings.Split(strings.TrimSpace(signedHdrElement), "=")
|
||||||
|
if len(signedHdrFields) != 2 {
|
||||||
|
return nil, ErrMissingFields
|
||||||
|
}
|
||||||
|
if signedHdrFields[0] != "SignedHeaders" {
|
||||||
|
return nil, ErrMissingSignHeadersTag
|
||||||
|
}
|
||||||
|
if signedHdrFields[1] == "" {
|
||||||
|
return nil, ErrMissingFields
|
||||||
|
}
|
||||||
|
signedHeaders := strings.Split(signedHdrFields[1], ";")
|
||||||
|
return signedHeaders, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse signature from signature tag.
|
||||||
|
func parseSignature(signElement string) (string, ErrorCode) {
|
||||||
|
signFields := strings.Split(strings.TrimSpace(signElement), "=")
|
||||||
|
if len(signFields) != 2 {
|
||||||
|
return "", ErrMissingFields
|
||||||
|
}
|
||||||
|
if signFields[0] != "Signature" {
|
||||||
|
return "", ErrMissingSignTag
|
||||||
|
}
|
||||||
|
if signFields[1] == "" {
|
||||||
|
return "", ErrMissingFields
|
||||||
|
}
|
||||||
|
signature := signFields[1]
|
||||||
|
return signature, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// check query headers with presigned signature
|
||||||
|
// - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
|
||||||
|
func (iam *IdentityAccessManagement) doesPresignedSignatureMatch(hashedPayload string, r *http.Request) (*Identity, ErrorCode) {
|
||||||
|
|
||||||
|
// Copy request
|
||||||
|
req := *r
|
||||||
|
|
||||||
|
// Parse request query string.
|
||||||
|
pSignValues, err := parsePreSignV4(req.URL.Query())
|
||||||
|
if err != ErrNone {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify if the access key id matches.
|
||||||
|
identity, cred, found := iam.lookupByAccessKey(pSignValues.Credential.accessKey)
|
||||||
|
if !found {
|
||||||
|
return nil, ErrInvalidAccessKeyID
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract all the signed headers along with its values.
|
||||||
|
extractedSignedHeaders, errCode := extractSignedHeaders(pSignValues.SignedHeaders, r)
|
||||||
|
if errCode != ErrNone {
|
||||||
|
return nil, errCode
|
||||||
|
}
|
||||||
|
// Construct new query.
|
||||||
|
query := make(url.Values)
|
||||||
|
if req.URL.Query().Get("X-Amz-Content-Sha256") != "" {
|
||||||
|
query.Set("X-Amz-Content-Sha256", hashedPayload)
|
||||||
|
}
|
||||||
|
|
||||||
|
query.Set("X-Amz-Algorithm", signV4Algorithm)
|
||||||
|
|
||||||
|
now := time.Now().UTC()
|
||||||
|
|
||||||
|
// If the host which signed the request is slightly ahead in time (by less than globalMaxSkewTime) the
|
||||||
|
// request should still be allowed.
|
||||||
|
if pSignValues.Date.After(now.Add(15 * time.Minute)) {
|
||||||
|
return nil, ErrRequestNotReadyYet
|
||||||
|
}
|
||||||
|
|
||||||
|
if now.Sub(pSignValues.Date) > pSignValues.Expires {
|
||||||
|
return nil, ErrExpiredPresignRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the date and expires.
|
||||||
|
t := pSignValues.Date
|
||||||
|
expireSeconds := int(pSignValues.Expires / time.Second)
|
||||||
|
|
||||||
|
// Construct the query.
|
||||||
|
query.Set("X-Amz-Date", t.Format(iso8601Format))
|
||||||
|
query.Set("X-Amz-Expires", strconv.Itoa(expireSeconds))
|
||||||
|
query.Set("X-Amz-SignedHeaders", getSignedHeaders(extractedSignedHeaders))
|
||||||
|
query.Set("X-Amz-Credential", cred.AccessKey+"/"+getScope(t, pSignValues.Credential.scope.region))
|
||||||
|
|
||||||
|
// Save other headers available in the request parameters.
|
||||||
|
for k, v := range req.URL.Query() {
|
||||||
|
|
||||||
|
// Handle the metadata in presigned put query string
|
||||||
|
if strings.Contains(strings.ToLower(k), "x-amz-meta-") {
|
||||||
|
query.Set(k, v[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(strings.ToLower(k), "x-amz") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
query[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the encoded query.
|
||||||
|
encodedQuery := query.Encode()
|
||||||
|
|
||||||
|
// Verify if date query is same.
|
||||||
|
if req.URL.Query().Get("X-Amz-Date") != query.Get("X-Amz-Date") {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
// Verify if expires query is same.
|
||||||
|
if req.URL.Query().Get("X-Amz-Expires") != query.Get("X-Amz-Expires") {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
// Verify if signed headers query is same.
|
||||||
|
if req.URL.Query().Get("X-Amz-SignedHeaders") != query.Get("X-Amz-SignedHeaders") {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
// Verify if credential query is same.
|
||||||
|
if req.URL.Query().Get("X-Amz-Credential") != query.Get("X-Amz-Credential") {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
// Verify if sha256 payload query is same.
|
||||||
|
if req.URL.Query().Get("X-Amz-Content-Sha256") != "" {
|
||||||
|
if req.URL.Query().Get("X-Amz-Content-Sha256") != query.Get("X-Amz-Content-Sha256") {
|
||||||
|
return nil, ErrContentSHA256Mismatch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify finally if signature is same.
|
||||||
|
|
||||||
|
// Get canonical request.
|
||||||
|
presignedCanonicalReq := getCanonicalRequest(extractedSignedHeaders, hashedPayload, encodedQuery, req.URL.Path, req.Method)
|
||||||
|
|
||||||
|
// Get string to sign from canonical request.
|
||||||
|
presignedStringToSign := getStringToSign(presignedCanonicalReq, t, pSignValues.Credential.getScope())
|
||||||
|
|
||||||
|
// Get hmac presigned signing key.
|
||||||
|
presignedSigningKey := getSigningKey(cred.SecretKey, pSignValues.Credential.scope.date, pSignValues.Credential.scope.region)
|
||||||
|
|
||||||
|
// Get new signature.
|
||||||
|
newSignature := getSignature(presignedSigningKey, presignedStringToSign)
|
||||||
|
|
||||||
|
// Verify signature.
|
||||||
|
if !compareSignatureV4(req.URL.Query().Get("X-Amz-Signature"), newSignature) {
|
||||||
|
return nil, ErrSignatureDoesNotMatch
|
||||||
|
}
|
||||||
|
return identity, ErrNone
|
||||||
|
}
|
||||||
|
|
||||||
|
func contains(list []string, elem string) bool {
|
||||||
|
for _, t := range list {
|
||||||
|
if t == elem {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// preSignValues data type represents structued form of AWS Signature V4 query string.
|
||||||
|
type preSignValues struct {
|
||||||
|
signValues
|
||||||
|
Date time.Time
|
||||||
|
Expires time.Duration
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses signature version '4' query string of the following form.
//
//   querystring = X-Amz-Algorithm=algorithm
//   querystring += &X-Amz-Credential= urlencode(accessKey + '/' + credential_scope)
//   querystring += &X-Amz-Date=date
//   querystring += &X-Amz-Expires=timeout interval
//   querystring += &X-Amz-SignedHeaders=signed_headers
//   querystring += &X-Amz-Signature=signature
//
// verifies if any of the necessary query params are missing in the presigned request.
func doesV4PresignParamsExist(query url.Values) ErrorCode {
	v4PresignQueryParams := []string{"X-Amz-Algorithm", "X-Amz-Credential", "X-Amz-Signature", "X-Amz-Date", "X-Amz-SignedHeaders", "X-Amz-Expires"}
	for _, v4PresignQueryParam := range v4PresignQueryParams {
		if _, ok := query[v4PresignQueryParam]; !ok {
			return ErrInvalidQueryParams
		}
	}
	return ErrNone
}

// Parses all the presigned signature values into separate elements.
func parsePreSignV4(query url.Values) (psv preSignValues, aec ErrorCode) {
	var err ErrorCode
	// verify whether the required query params exist.
	err = doesV4PresignParamsExist(query)
	if err != ErrNone {
		return psv, err
	}

	// Verify if the query algorithm is supported or not.
	if query.Get("X-Amz-Algorithm") != signV4Algorithm {
		return psv, ErrInvalidQuerySignatureAlgo
	}

	// Initialize signature version '4' structured header.
	preSignV4Values := preSignValues{}

	// Save credential.
	preSignV4Values.Credential, err = parseCredentialHeader("Credential=" + query.Get("X-Amz-Credential"))
	if err != ErrNone {
		return psv, err
	}

	var e error
	// Save date in native time.Time.
	preSignV4Values.Date, e = time.Parse(iso8601Format, query.Get("X-Amz-Date"))
	if e != nil {
		return psv, ErrMalformedPresignedDate
	}

	// Save expires in native time.Duration.
	preSignV4Values.Expires, e = time.ParseDuration(query.Get("X-Amz-Expires") + "s")
	if e != nil {
		return psv, ErrMalformedExpires
	}

	if preSignV4Values.Expires < 0 {
		return psv, ErrNegativeExpires
	}

	// Check if Expiry time is less than 7 days (value in seconds).
	if preSignV4Values.Expires.Seconds() > 604800 {
		return psv, ErrMaximumExpires
	}

	// Save signed headers.
	preSignV4Values.SignedHeaders, err = parseSignedHeader("SignedHeaders=" + query.Get("X-Amz-SignedHeaders"))
	if err != ErrNone {
		return psv, err
	}

	// Save signature.
	preSignV4Values.Signature, err = parseSignature("Signature=" + query.Get("X-Amz-Signature"))
	if err != ErrNone {
		return psv, err
	}

	// Return structured form of signature query string.
	return preSignV4Values, ErrNone
}
// extractSignedHeaders extracts the signed headers listed in the Authorization header.
func extractSignedHeaders(signedHeaders []string, r *http.Request) (http.Header, ErrorCode) {
	reqHeaders := r.Header
	// find whether "host" is part of list of signed headers.
	// if not return ErrUnsignedHeaders. "host" is mandatory.
	if !contains(signedHeaders, "host") {
		return nil, ErrUnsignedHeaders
	}
	extractedSignedHeaders := make(http.Header)
	for _, header := range signedHeaders {
		// `host` will not be found in the headers; it can be found in r.Host.
		// But it is always required that the list of signed headers contains host.
		val, ok := reqHeaders[http.CanonicalHeaderKey(header)]
		if ok {
			for _, enc := range val {
				extractedSignedHeaders.Add(header, enc)
			}
			continue
		}
		switch header {
		case "expect":
			// The Go http server strips off the 'Expect' header. If the
			// client sent it as part of the signed headers we need to
			// handle it, otherwise we would see a signature mismatch.
			// `aws-cli` sets this as part of signed headers.
			//
			// According to
			// http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20
			// the Expect header is always of the form:
			//
			//   Expect       =  "Expect" ":" 1#expectation
			//   expectation  =  "100-continue" | expectation-extension
			//
			// So it is safe to assume that '100-continue' is what would
			// be sent; for the time being keep this workaround.
			// Adding a *TODO* to remove this later when the Go server
			// doesn't filter out the 'Expect' header.
			extractedSignedHeaders.Set(header, "100-continue")
		case "host":
			// Go http server removes "host" from Request.Header
			extractedSignedHeaders.Set(header, r.Host)
		case "transfer-encoding":
			for _, enc := range r.TransferEncoding {
				extractedSignedHeaders.Add(header, enc)
			}
		case "content-length":
			// The Signature-V4 spec excludes Content-Length from the signed headers list for signature calculation.
			// But some clients deviate from this rule. Hence we consider Content-Length for signature
			// calculation to be compatible with such clients.
			extractedSignedHeaders.Set(header, strconv.FormatInt(r.ContentLength, 10))
		default:
			return nil, ErrUnsignedHeaders
		}
	}
	return extractedSignedHeaders, ErrNone
}
// getSignedHeaders generate a string i.e. alphabetically sorted, semicolon-separated list of lowercase request header names
func getSignedHeaders(signedHeaders http.Header) string {
	var headers []string
	for k := range signedHeaders {
		headers = append(headers, strings.ToLower(k))
	}
	sort.Strings(headers)
	return strings.Join(headers, ";")
}

// getScope generate a string of a specific date, an AWS region, and a service.
func getScope(t time.Time, region string) string {
	scope := strings.Join([]string{
		t.Format(yyyymmdd),
		region,
		"s3",
		"aws4_request",
	}, "/")
	return scope
}

// getCanonicalRequest generate a canonical request of style
//
//   canonicalRequest =
//     <HTTPMethod>\n
//     <CanonicalURI>\n
//     <CanonicalQueryString>\n
//     <CanonicalHeaders>\n
//     <SignedHeaders>\n
//     <HashedPayload>
//
func getCanonicalRequest(extractedSignedHeaders http.Header, payload, queryStr, urlPath, method string) string {
	rawQuery := strings.Replace(queryStr, "+", "%20", -1)
	encodedPath := encodePath(urlPath)
	canonicalRequest := strings.Join([]string{
		method,
		encodedPath,
		rawQuery,
		getCanonicalHeaders(extractedSignedHeaders),
		getSignedHeaders(extractedSignedHeaders),
		payload,
	}, "\n")
	return canonicalRequest
}

// getStringToSign a string based on selected query values.
func getStringToSign(canonicalRequest string, t time.Time, scope string) string {
	stringToSign := signV4Algorithm + "\n" + t.Format(iso8601Format) + "\n"
	stringToSign = stringToSign + scope + "\n"
	canonicalRequestBytes := sha256.Sum256([]byte(canonicalRequest))
	stringToSign = stringToSign + hex.EncodeToString(canonicalRequestBytes[:])
	return stringToSign
}

// sumHMAC calculate hmac between two input byte array.
func sumHMAC(key []byte, data []byte) []byte {
	hash := hmac.New(sha256.New, key)
	hash.Write(data)
	return hash.Sum(nil)
}

// getSigningKey hmac seed to calculate final signature.
func getSigningKey(secretKey string, t time.Time, region string) []byte {
	date := sumHMAC([]byte("AWS4"+secretKey), []byte(t.Format(yyyymmdd)))
	regionBytes := sumHMAC(date, []byte(region))
	service := sumHMAC(regionBytes, []byte("s3"))
	signingKey := sumHMAC(service, []byte("aws4_request"))
	return signingKey
}

// getSignature final signature in hexadecimal form.
func getSignature(signingKey []byte, stringToSign string) string {
	return hex.EncodeToString(sumHMAC(signingKey, []byte(stringToSign)))
}

// getCanonicalHeaders generate a list of request headers with their values
func getCanonicalHeaders(signedHeaders http.Header) string {
	var headers []string
	vals := make(http.Header)
	for k, vv := range signedHeaders {
		headers = append(headers, strings.ToLower(k))
		vals[strings.ToLower(k)] = vv
	}
	sort.Strings(headers)

	var buf bytes.Buffer
	for _, k := range headers {
		buf.WriteString(k)
		buf.WriteByte(':')
		for idx, v := range vals[k] {
			if idx > 0 {
				buf.WriteByte(',')
			}
			buf.WriteString(signV4TrimAll(v))
		}
		buf.WriteByte('\n')
	}
	return buf.String()
}

// Trim leading and trailing spaces and replace sequential spaces with one space, following Trimall()
// in http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
func signV4TrimAll(input string) string {
	// Compress adjacent spaces (a space is determined by
	// unicode.IsSpace() internally here) to one space and return
	return strings.Join(strings.Fields(input), " ")
}
// if object matches reserved string, no need to encode them
var reservedObjectNames = regexp.MustCompile("^[a-zA-Z0-9-_.~/]+$")

// encodePath encodes the strings from UTF-8 byte representations to HTML hex escape sequences.
//
// This is necessary since regular url.Parse() and url.Encode() functions do not support UTF-8;
// non-English characters cannot be parsed due to the nature in which url.Encode() is written.
//
// This function, on the other hand, is a direct replacement for the url.Encode() technique to support
// pretty much every UTF-8 character.
func encodePath(pathName string) string {
	if reservedObjectNames.MatchString(pathName) {
		return pathName
	}
	var encodedPathname string
	for _, s := range pathName {
		if 'A' <= s && s <= 'Z' || 'a' <= s && s <= 'z' || '0' <= s && s <= '9' { // §2.3 Unreserved characters (mark)
			encodedPathname = encodedPathname + string(s)
			continue
		}
		switch s {
		case '-', '_', '.', '~', '/': // §2.3 Unreserved characters (mark)
			encodedPathname = encodedPathname + string(s)
			continue
		default:
			len := utf8.RuneLen(s)
			if len < 0 {
				// if utf8 cannot convert, return the same string as is
				return pathName
			}
			u := make([]byte, len)
			utf8.EncodeRune(u, s)
			for _, r := range u {
				hex := hex.EncodeToString([]byte{r})
				encodedPathname = encodedPathname + "%" + strings.ToUpper(hex)
			}
		}
	}
	return encodedPathname
}

// compareSignatureV4 returns true if and only if both signatures
// are equal. The signatures are expected to be HEX encoded strings
// according to the AWS S3 signature V4 spec.
func compareSignatureV4(sig1, sig2 string) bool {
	// The constant-time compare using []byte(str) works because the hex encoding
	// is unique for a sequence of bytes. See also compareSignatureV2.
	return subtle.ConstantTimeCompare([]byte(sig1), []byte(sig2)) == 1
}
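Taken together, getSigningKey and getSignature implement the standard SigV4 key-derivation chain (kSecret -> kDate -> kRegion -> kService -> kSigning). As a rough standalone sketch, using a hypothetical secret key, date, region, and string-to-sign that are not part of this change, the same chain can be reproduced with only the Go standard library:

	package main

	import (
		"crypto/hmac"
		"crypto/sha256"
		"encoding/hex"
		"fmt"
	)

	// hmacSHA256 mirrors sumHMAC above: HMAC-SHA256 of data under key.
	func hmacSHA256(key, data []byte) []byte {
		h := hmac.New(sha256.New, key)
		h.Write(data)
		return h.Sum(nil)
	}

	func main() {
		// Hypothetical inputs, for illustration only.
		secretKey := "some_secret_key1"
		dateStamp := "20190225" // yyyymmdd
		region := "us-east-1"
		stringToSign := "AWS4-HMAC-SHA256\n20190225T000000Z\n" +
			dateStamp + "/" + region + "/s3/aws4_request\n" +
			"<hex sha256 of canonical request>"

		// Derive the signing key exactly as getSigningKey does.
		kDate := hmacSHA256([]byte("AWS4"+secretKey), []byte(dateStamp))
		kRegion := hmacSHA256(kDate, []byte(region))
		kService := hmacSHA256(kRegion, []byte("s3"))
		kSigning := hmacSHA256(kService, []byte("aws4_request"))

		// Final signature is the hex-encoded HMAC of the string to sign.
		fmt.Println(hex.EncodeToString(hmacSHA256(kSigning, []byte(stringToSign))))
	}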
weed/s3api/auto_signature_v4_test.go (new file, 419 lines)
@@ -0,0 +1,419 @@
package s3api

import (
	"bytes"
	"crypto/md5"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"sort"
	"strconv"
	"strings"
	"testing"
	"time"
	"unicode/utf8"
)

// TestIsRequestPresignedSignatureV4 - Test validates the logic for presigned signature version v4 detection.
func TestIsRequestPresignedSignatureV4(t *testing.T) {
	testCases := []struct {
		inputQueryKey   string
		inputQueryValue string
		expectedResult  bool
	}{
		// Test case - 1.
		// Test case with query key "X-Amz-Credential" set.
		{"", "", false},
		// Test case - 2.
		{"X-Amz-Credential", "", true},
		// Test case - 3.
		{"X-Amz-Content-Sha256", "", false},
	}

	for i, testCase := range testCases {
		// creating an input HTTP request.
		// Only the query parameters are relevant for this particular test.
		inputReq, err := http.NewRequest("GET", "http://example.com", nil)
		if err != nil {
			t.Fatalf("Error initializing input HTTP request: %v", err)
		}
		q := inputReq.URL.Query()
		q.Add(testCase.inputQueryKey, testCase.inputQueryValue)
		inputReq.URL.RawQuery = q.Encode()

		actualResult := isRequestPresignedSignatureV4(inputReq)
		if testCase.expectedResult != actualResult {
			t.Errorf("Test %d: Expected the result to `%v`, but instead got `%v`", i+1, testCase.expectedResult, actualResult)
		}
	}
}

// Tests the request authentication function and the s3 errors it replies with.
func TestIsReqAuthenticated(t *testing.T) {
	iam := NewIdentityAccessManagement("", "")
	iam.identities = []*Identity{
		{
			Name: "someone",
			Credentials: []*Credential{
				{
					AccessKey: "access_key_1",
					SecretKey: "secret_key_1",
				},
			},
			Actions: nil,
		},
	}

	// List of test cases for validating http request authentication.
	testCases := []struct {
		req     *http.Request
		s3Error ErrorCode
	}{
		// When request is unsigned, access denied is returned.
		{mustNewRequest("GET", "http://127.0.0.1:9000", 0, nil, t), ErrAccessDenied},
		// When request is properly signed, error is none.
		{mustNewSignedRequest("GET", "http://127.0.0.1:9000", 0, nil, t), ErrNone},
	}

	// Validates all testcases.
	for i, testCase := range testCases {
		if _, s3Error := iam.reqSignatureV4Verify(testCase.req); s3Error != testCase.s3Error {
			ioutil.ReadAll(testCase.req.Body)
			t.Fatalf("Test %d: Unexpected S3 error: want %d - got %d", i, testCase.s3Error, s3Error)
		}
	}
}

func TestCheckAdminRequestAuthType(t *testing.T) {
	iam := NewIdentityAccessManagement("", "")
	iam.identities = []*Identity{
		{
			Name: "someone",
			Credentials: []*Credential{
				{
					AccessKey: "access_key_1",
					SecretKey: "secret_key_1",
				},
			},
			Actions: nil,
		},
	}

	testCases := []struct {
		Request *http.Request
		ErrCode ErrorCode
	}{
		{Request: mustNewRequest("GET", "http://127.0.0.1:9000", 0, nil, t), ErrCode: ErrAccessDenied},
		{Request: mustNewSignedRequest("GET", "http://127.0.0.1:9000", 0, nil, t), ErrCode: ErrNone},
		{Request: mustNewPresignedRequest("GET", "http://127.0.0.1:9000", 0, nil, t), ErrCode: ErrNone},
	}
	for i, testCase := range testCases {
		if _, s3Error := iam.reqSignatureV4Verify(testCase.Request); s3Error != testCase.ErrCode {
			t.Errorf("Test %d: Unexpected s3error returned wanted %d, got %d", i, testCase.ErrCode, s3Error)
		}
	}
}

// Provides a fully populated http request instance, fails otherwise.
func mustNewRequest(method string, urlStr string, contentLength int64, body io.ReadSeeker, t *testing.T) *http.Request {
	req, err := newTestRequest(method, urlStr, contentLength, body)
	if err != nil {
		t.Fatalf("Unable to initialize new http request %s", err)
	}
	return req
}

// This is similar to mustNewRequest but additionally the request
// is signed with AWS Signature V4, fails if not able to do so.
func mustNewSignedRequest(method string, urlStr string, contentLength int64, body io.ReadSeeker, t *testing.T) *http.Request {
	req := mustNewRequest(method, urlStr, contentLength, body, t)
	cred := &Credential{"access_key_1", "secret_key_1"}
	if err := signRequestV4(req, cred.AccessKey, cred.SecretKey); err != nil {
		t.Fatalf("Unable to initialize new signed http request %s", err)
	}
	return req
}

// This is similar to mustNewRequest but additionally the request
// is presigned with AWS Signature V4, fails if not able to do so.
func mustNewPresignedRequest(method string, urlStr string, contentLength int64, body io.ReadSeeker, t *testing.T) *http.Request {
	req := mustNewRequest(method, urlStr, contentLength, body, t)
	cred := &Credential{"access_key_1", "secret_key_1"}
	if err := preSignV4(req, cred.AccessKey, cred.SecretKey, int64(10*time.Minute.Seconds())); err != nil {
		t.Fatalf("Unable to initialize new presigned http request %s", err)
	}
	return req
}

// Returns new HTTP request object.
func newTestRequest(method, urlStr string, contentLength int64, body io.ReadSeeker) (*http.Request, error) {
	if method == "" {
		method = "POST"
	}

	// Save for subsequent use
	var hashedPayload string
	var md5Base64 string
	switch {
	case body == nil:
		hashedPayload = getSHA256Hash([]byte{})
	default:
		payloadBytes, err := ioutil.ReadAll(body)
		if err != nil {
			return nil, err
		}
		hashedPayload = getSHA256Hash(payloadBytes)
		md5Base64 = getMD5HashBase64(payloadBytes)
	}
	// Seek back to beginning.
	if body != nil {
		body.Seek(0, 0)
	} else {
		body = bytes.NewReader([]byte(""))
	}
	req, err := http.NewRequest(method, urlStr, body)
	if err != nil {
		return nil, err
	}
	if md5Base64 != "" {
		req.Header.Set("Content-Md5", md5Base64)
	}
	req.Header.Set("x-amz-content-sha256", hashedPayload)

	// Add Content-Length
	req.ContentLength = contentLength

	return req, nil
}

// getSHA256Hash returns SHA-256 hash in hex encoding of given data.
func getSHA256Hash(data []byte) string {
	return hex.EncodeToString(getSHA256Sum(data))
}

// getMD5HashBase64 returns MD5 hash in base64 encoding of given data.
func getMD5HashBase64(data []byte) string {
	return base64.StdEncoding.EncodeToString(getMD5Sum(data))
}

// getSHA256Sum returns SHA-256 sum of given data.
func getSHA256Sum(data []byte) []byte {
	hash := sha256.New()
	hash.Write(data)
	return hash.Sum(nil)
}

// getMD5Sum returns MD5 sum of given data.
func getMD5Sum(data []byte) []byte {
	hash := md5.New()
	hash.Write(data)
	return hash.Sum(nil)
}

// getMD5Hash returns MD5 hash in hex encoding of given data.
func getMD5Hash(data []byte) string {
	return hex.EncodeToString(getMD5Sum(data))
}

var ignoredHeaders = map[string]bool{
	"Authorization":  true,
	"Content-Type":   true,
	"Content-Length": true,
	"User-Agent":     true,
}

// Sign given request using Signature V4.
func signRequestV4(req *http.Request, accessKey, secretKey string) error {
	// Get hashed payload.
	hashedPayload := req.Header.Get("x-amz-content-sha256")
	if hashedPayload == "" {
		return fmt.Errorf("Invalid hashed payload")
	}

	currTime := time.Now()

	// Set x-amz-date.
	req.Header.Set("x-amz-date", currTime.Format(iso8601Format))

	// Get header map.
	headerMap := make(map[string][]string)
	for k, vv := range req.Header {
		// If request header key is not in ignored headers, then add it.
		if _, ok := ignoredHeaders[http.CanonicalHeaderKey(k)]; !ok {
			headerMap[strings.ToLower(k)] = vv
		}
	}

	// Get header keys.
	headers := []string{"host"}
	for k := range headerMap {
		headers = append(headers, k)
	}
	sort.Strings(headers)

	region := "us-east-1"

	// Get canonical headers.
	var buf bytes.Buffer
	for _, k := range headers {
		buf.WriteString(k)
		buf.WriteByte(':')
		switch {
		case k == "host":
			buf.WriteString(req.URL.Host)
			fallthrough
		default:
			for idx, v := range headerMap[k] {
				if idx > 0 {
					buf.WriteByte(',')
				}
				buf.WriteString(v)
			}
			buf.WriteByte('\n')
		}
	}
	canonicalHeaders := buf.String()

	// Get signed headers.
	signedHeaders := strings.Join(headers, ";")

	// Get canonical query string.
	req.URL.RawQuery = strings.Replace(req.URL.Query().Encode(), "+", "%20", -1)

	// Get canonical URI.
	canonicalURI := EncodePath(req.URL.Path)

	// Get canonical request.
	// canonicalRequest =
	//   <HTTPMethod>\n
	//   <CanonicalURI>\n
	//   <CanonicalQueryString>\n
	//   <CanonicalHeaders>\n
	//   <SignedHeaders>\n
	//   <HashedPayload>
	//
	canonicalRequest := strings.Join([]string{
		req.Method,
		canonicalURI,
		req.URL.RawQuery,
		canonicalHeaders,
		signedHeaders,
		hashedPayload,
	}, "\n")

	// Get scope.
	scope := strings.Join([]string{
		currTime.Format(yyyymmdd),
		region,
		"s3",
		"aws4_request",
	}, "/")

	stringToSign := "AWS4-HMAC-SHA256" + "\n" + currTime.Format(iso8601Format) + "\n"
	stringToSign = stringToSign + scope + "\n"
	stringToSign = stringToSign + getSHA256Hash([]byte(canonicalRequest))

	date := sumHMAC([]byte("AWS4"+secretKey), []byte(currTime.Format(yyyymmdd)))
	regionHMAC := sumHMAC(date, []byte(region))
	service := sumHMAC(regionHMAC, []byte("s3"))
	signingKey := sumHMAC(service, []byte("aws4_request"))

	signature := hex.EncodeToString(sumHMAC(signingKey, []byte(stringToSign)))

	// final Authorization header
	parts := []string{
		"AWS4-HMAC-SHA256" + " Credential=" + accessKey + "/" + scope,
		"SignedHeaders=" + signedHeaders,
		"Signature=" + signature,
	}
	auth := strings.Join(parts, ", ")
	req.Header.Set("Authorization", auth)

	return nil
}

// preSignV4 presigns the request, in accordance with
// http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html.
func preSignV4(req *http.Request, accessKeyID, secretAccessKey string, expires int64) error {
	// Presign is not needed for anonymous credentials.
	if accessKeyID == "" || secretAccessKey == "" {
		return errors.New("Presign cannot be generated without access and secret keys")
	}

	region := "us-east-1"
	date := time.Now().UTC()
	scope := getScope(date, region)
	credential := fmt.Sprintf("%s/%s", accessKeyID, scope)

	// Set URL query.
	query := req.URL.Query()
	query.Set("X-Amz-Algorithm", signV4Algorithm)
	query.Set("X-Amz-Date", date.Format(iso8601Format))
	query.Set("X-Amz-Expires", strconv.FormatInt(expires, 10))
	query.Set("X-Amz-SignedHeaders", "host")
	query.Set("X-Amz-Credential", credential)
	query.Set("X-Amz-Content-Sha256", unsignedPayload)

	// "host" is the only header required to be signed for Presigned URLs.
	extractedSignedHeaders := make(http.Header)
	extractedSignedHeaders.Set("host", req.Host)

	queryStr := strings.Replace(query.Encode(), "+", "%20", -1)
	canonicalRequest := getCanonicalRequest(extractedSignedHeaders, unsignedPayload, queryStr, req.URL.Path, req.Method)
	stringToSign := getStringToSign(canonicalRequest, date, scope)
	signingKey := getSigningKey(secretAccessKey, date, region)
	signature := getSignature(signingKey, stringToSign)

	req.URL.RawQuery = query.Encode()

	// Add signature header to RawQuery.
	req.URL.RawQuery += "&X-Amz-Signature=" + url.QueryEscape(signature)

	// Construct the final presigned URL.
	return nil
}

// EncodePath encodes the strings from UTF-8 byte representations to HTML hex escape sequences.
//
// This is necessary since regular url.Parse() and url.Encode() functions do not support UTF-8;
// non-English characters cannot be parsed due to the nature in which url.Encode() is written.
//
// This function, on the other hand, is a direct replacement for the url.Encode() technique to support
// pretty much every UTF-8 character.
func EncodePath(pathName string) string {
	if reservedObjectNames.MatchString(pathName) {
		return pathName
	}
	var encodedPathname string
	for _, s := range pathName {
		if 'A' <= s && s <= 'Z' || 'a' <= s && s <= 'z' || '0' <= s && s <= '9' { // §2.3 Unreserved characters (mark)
			encodedPathname = encodedPathname + string(s)
			continue
		}
		switch s {
		case '-', '_', '.', '~', '/': // §2.3 Unreserved characters (mark)
			encodedPathname = encodedPathname + string(s)
			continue
		default:
			len := utf8.RuneLen(s)
			if len < 0 {
				// if utf8 cannot convert, return the same string as is
				return pathName
			}
			u := make([]byte, len)
			utf8.EncodeRune(u, s)
			for _, r := range u {
				hex := hex.EncodeToString([]byte{r})
				encodedPathname = encodedPathname + "%" + strings.ToUpper(hex)
			}
		}
	}
	return encodedPathname
}
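The preSignV4 helper above carries the signature entirely in the query string. A hypothetical presigned URL built this way, with illustrative values and assuming the unsignedPayload constant is the usual "UNSIGNED-PAYLOAD" marker, would look roughly like:

	http://127.0.0.1:8333/some-bucket/some-object
	    ?X-Amz-Algorithm=AWS4-HMAC-SHA256
	    &X-Amz-Credential=some_access_key1%2F20190225%2Fus-east-1%2Fs3%2Faws4_request
	    &X-Amz-Date=20190225T000000Z
	    &X-Amz-Expires=600
	    &X-Amz-SignedHeaders=host
	    &X-Amz-Content-Sha256=UNSIGNED-PAYLOAD
	    &X-Amz-Signature=<64 hex characters>

These are exactly the six X-Amz-* parameters that doesV4PresignParamsExist requires before the server recomputes and compares the signature.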
@@ -21,16 +21,114 @@ package s3api

 import (
 	"bufio"
 	"bytes"
+	"crypto/sha256"
+	"encoding/hex"
 	"errors"
-	"github.com/dustin/go-humanize"
+	"hash"
 	"io"
 	"net/http"
+	"time"
+
+	"github.com/dustin/go-humanize"
 )

-// Streaming AWS Signature Version '4' constants.
-const (
-	streamingContentSHA256 = "STREAMING-AWS4-HMAC-SHA256-PAYLOAD"
-)
+// getChunkSignature - get chunk signature.
+func getChunkSignature(secretKey string, seedSignature string, region string, date time.Time, hashedChunk string) string {
+
+	// Calculate string to sign.
+	stringToSign := signV4ChunkedAlgorithm + "\n" +
+		date.Format(iso8601Format) + "\n" +
+		getScope(date, region) + "\n" +
+		seedSignature + "\n" +
+		emptySHA256 + "\n" +
+		hashedChunk
+
+	// Get hmac signing key.
+	signingKey := getSigningKey(secretKey, date, region)
+
+	// Calculate signature.
+	newSignature := getSignature(signingKey, stringToSign)
+
+	return newSignature
+}
+
+// calculateSeedSignature - Calculate seed signature in accordance with
+//   - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
+// returns signature, error otherwise if the signature mismatches or any other
+// error while parsing and validating.
+func (iam *IdentityAccessManagement) calculateSeedSignature(r *http.Request) (cred *Credential, signature string, region string, date time.Time, errCode ErrorCode) {
+
+	// Copy request.
+	req := *r
+
+	// Save authorization header.
+	v4Auth := req.Header.Get("Authorization")
+
+	// Parse signature version '4' header.
+	signV4Values, errCode := parseSignV4(v4Auth)
+	if errCode != ErrNone {
+		return nil, "", "", time.Time{}, errCode
+	}
+
+	// Payload streaming.
+	payload := streamingContentSHA256
+
+	// Payload for STREAMING signature should be 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD'
+	if payload != req.Header.Get("X-Amz-Content-Sha256") {
+		return nil, "", "", time.Time{}, ErrContentSHA256Mismatch
+	}
+
+	// Extract all the signed headers along with their values.
+	extractedSignedHeaders, errCode := extractSignedHeaders(signV4Values.SignedHeaders, r)
+	if errCode != ErrNone {
+		return nil, "", "", time.Time{}, errCode
+	}
+	// Verify if the access key id matches.
+	_, cred, found := iam.lookupByAccessKey(signV4Values.Credential.accessKey)
+	if !found {
+		return nil, "", "", time.Time{}, ErrInvalidAccessKeyID
+	}
+
+	// Verify if region is valid.
+	region = signV4Values.Credential.scope.region
+
+	// Extract date; if not present, throw an error.
+	var dateStr string
+	if dateStr = req.Header.Get(http.CanonicalHeaderKey("x-amz-date")); dateStr == "" {
+		if dateStr = r.Header.Get("Date"); dateStr == "" {
+			return nil, "", "", time.Time{}, ErrMissingDateHeader
+		}
+	}
+	// Parse date header.
+	var err error
+	date, err = time.Parse(iso8601Format, dateStr)
+	if err != nil {
+		return nil, "", "", time.Time{}, ErrMalformedDate
+	}
+
+	// Query string.
+	queryStr := req.URL.Query().Encode()
+
+	// Get canonical request.
+	canonicalRequest := getCanonicalRequest(extractedSignedHeaders, payload, queryStr, req.URL.Path, req.Method)
+
+	// Get string to sign from canonical request.
+	stringToSign := getStringToSign(canonicalRequest, date, signV4Values.Credential.getScope())
+
+	// Get hmac signing key.
+	signingKey := getSigningKey(cred.SecretKey, signV4Values.Credential.scope.date, region)
+
+	// Calculate signature.
+	newSignature := getSignature(signingKey, stringToSign)
+
+	// Verify if signature matches.
+	if !compareSignatureV4(newSignature, signV4Values.Signature) {
+		return nil, "", "", time.Time{}, ErrSignatureDoesNotMatch
+	}
+
+	// Return calculated signature.
+	return cred, newSignature, region, date, ErrNone
+}

 const maxLineLength = 4 * humanize.KiByte // assumed <= bufio.defaultBufSize 4KiB

@@ -43,20 +141,34 @@ var errMalformedEncoding = errors.New("malformed chunked encoding")

 // newSignV4ChunkedReader returns a new s3ChunkedReader that translates the data read from r
 // out of HTTP "chunked" format before returning it.
 // The s3ChunkedReader returns io.EOF when the final 0-length chunk is read.
-func newSignV4ChunkedReader(req *http.Request) io.ReadCloser {
-	return &s3ChunkedReader{
-		reader: bufio.NewReader(req.Body),
-		state:  readChunkHeader,
+func (iam *IdentityAccessManagement) newSignV4ChunkedReader(req *http.Request) (io.ReadCloser, ErrorCode) {
+	ident, seedSignature, region, seedDate, errCode := iam.calculateSeedSignature(req)
+	if errCode != ErrNone {
+		return nil, errCode
 	}
+	return &s3ChunkedReader{
+		cred:              ident,
+		reader:            bufio.NewReader(req.Body),
+		seedSignature:     seedSignature,
+		seedDate:          seedDate,
+		region:            region,
+		chunkSHA256Writer: sha256.New(),
+		state:             readChunkHeader,
+	}, ErrNone
 }

 // Represents the overall state that is required for decoding a
 // AWS Signature V4 chunked reader.
 type s3ChunkedReader struct {
+	cred              *Credential
 	reader            *bufio.Reader
+	seedSignature     string
+	seedDate          time.Time
+	region            string
 	state             chunkState
 	lastChunk         bool
 	chunkSignature    string
+	chunkSHA256Writer hash.Hash // Calculates sha256 of chunk data.
 	n                 uint64 // Unread bytes in chunk
 	err               error
 }

@@ -157,6 +269,9 @@ func (cr *s3ChunkedReader) Read(buf []byte) (n int, err error) {
 			return 0, cr.err
 		}

+		// Calculate sha256.
+		cr.chunkSHA256Writer.Write(rbuf[:n0])
+
 		// Update the bytes read into request buffer so far.
 		n += n0
 		buf = buf[n0:]

@@ -169,6 +284,19 @@ func (cr *s3ChunkedReader) Read(buf []byte) (n int, err error) {
 			continue
 		}
 	case verifyChunk:
+		// Calculate the hashed chunk.
+		hashedChunk := hex.EncodeToString(cr.chunkSHA256Writer.Sum(nil))
+		// Calculate the chunk signature.
+		newSignature := getChunkSignature(cr.cred.SecretKey, cr.seedSignature, cr.region, cr.seedDate, hashedChunk)
+		if !compareSignatureV4(cr.chunkSignature, newSignature) {
+			// Chunk signature doesn't match; report a signature mismatch.
+			cr.err = errors.New("chunk signature does not match")
+			return 0, cr.err
+		}
+		// The newly calculated signature becomes the seed for the next chunk;
+		// this follows the chaining.
+		cr.seedSignature = newSignature
+		cr.chunkSHA256Writer.Reset()
 		if cr.lastChunk {
 			cr.state = eofChunk
 		} else {
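For context, the body this reader decodes follows the aws-chunked framing from the sigv4-streaming document referenced in the comments above. An illustrative (not literal) exchange, with chunk sizes in hex, looks like:

	10000;chunk-signature=<64 hex characters>\r\n
	<65536 bytes of payload>\r\n
	400;chunk-signature=<64 hex characters>\r\n
	<1024 bytes of payload>\r\n
	0;chunk-signature=<64 hex characters>\r\n
	\r\n

Each chunk-signature is computed by getChunkSignature over the previous signature (starting from the seed signature of the request headers), which is why the reader re-seeds cr.seedSignature after verifying every chunk.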
@@ -9,6 +9,8 @@ import (

 const (
 	signV4Algorithm = "AWS4-HMAC-SHA256"
 	signV2Algorithm = "AWS"
+	iso8601Format   = "20060102T150405Z"
+	yyyymmdd        = "20060102"
 )

 // Verify if request has JWT.

@@ -23,8 +25,8 @@ func isRequestSignatureV4(r *http.Request) bool {

 // Verify if request has AWS Signature Version '2'.
 func isRequestSignatureV2(r *http.Request) bool {
-	return (!strings.HasPrefix(r.Header.Get("Authorization"), signV4Algorithm) &&
-		strings.HasPrefix(r.Header.Get("Authorization"), signV2Algorithm))
+	return !strings.HasPrefix(r.Header.Get("Authorization"), signV4Algorithm) &&
+		strings.HasPrefix(r.Header.Get("Authorization"), signV2Algorithm)
 }

 // Verify if request has AWS PreSign Version '4'.
@@ -27,6 +27,7 @@ type ErrorCode int

 // Error codes, see full list at http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
 const (
 	ErrNone ErrorCode = iota
+	ErrAccessDenied
 	ErrMethodNotAllowed
 	ErrBucketNotEmpty
 	ErrBucketAlreadyExists

@@ -43,12 +44,40 @@ const (
 	ErrInternalError
 	ErrInvalidCopyDest
 	ErrInvalidCopySource
+	ErrAuthHeaderEmpty
+	ErrSignatureVersionNotSupported
+	ErrMissingFields
+	ErrMissingCredTag
+	ErrCredMalformed
+	ErrMalformedDate
+	ErrMalformedPresignedDate
+	ErrMalformedCredentialDate
+	ErrMissingSignHeadersTag
+	ErrMissingSignTag
+	ErrUnsignedHeaders
+	ErrInvalidQueryParams
+	ErrInvalidQuerySignatureAlgo
+	ErrExpiredPresignRequest
+	ErrMalformedExpires
+	ErrNegativeExpires
+	ErrMaximumExpires
+	ErrSignatureDoesNotMatch
+	ErrContentSHA256Mismatch
+	ErrInvalidAccessKeyID
+	ErrRequestNotReadyYet
+	ErrMissingDateHeader
+	ErrInvalidRequest
 	ErrNotImplemented
 )

 // error code to APIError structure, these fields carry respective
 // descriptions for all the error responses.
 var errorCodeResponse = map[ErrorCode]APIError{
+	ErrAccessDenied: {
+		Code:           "AccessDenied",
+		Description:    "Access Denied.",
+		HTTPStatusCode: http.StatusForbidden,
+	},
 	ErrMethodNotAllowed: {
 		Code:           "MethodNotAllowed",
 		Description:    "The specified method is not allowed against this resource.",

@@ -132,6 +161,121 @@ var errorCodeResponse = map[ErrorCode]APIError{
 		HTTPStatusCode: http.StatusBadRequest,
 	},
+
+	ErrAuthHeaderEmpty: {
+		Code:           "InvalidArgument",
+		Description:    "Authorization header is invalid -- one and only one ' ' (space) required.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrSignatureVersionNotSupported: {
+		Code:           "InvalidRequest",
+		Description:    "The authorization mechanism you have provided is not supported. Please use AWS4-HMAC-SHA256.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMissingFields: {
+		Code:           "MissingFields",
+		Description:    "Missing fields in request.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMissingCredTag: {
+		Code:           "InvalidRequest",
+		Description:    "Missing Credential field for this request.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrCredMalformed: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "Error parsing the X-Amz-Credential parameter; the Credential is mal-formed; expecting \"<YOUR-AKID>/YYYYMMDD/REGION/SERVICE/aws4_request\".",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMalformedDate: {
+		Code:           "MalformedDate",
+		Description:    "Invalid date format header, expected to be in ISO8601, RFC1123 or RFC1123Z time format.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMalformedPresignedDate: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "X-Amz-Date must be in the ISO8601 Long Format \"yyyyMMdd'T'HHmmss'Z'\"",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMissingSignHeadersTag: {
+		Code:           "InvalidArgument",
+		Description:    "Signature header missing SignedHeaders field.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMissingSignTag: {
+		Code:           "AccessDenied",
+		Description:    "Signature header missing Signature field.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+
+	ErrUnsignedHeaders: {
+		Code:           "AccessDenied",
+		Description:    "There were headers present in the request which were not signed",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrInvalidQueryParams: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "Query-string authentication version 4 requires the X-Amz-Algorithm, X-Amz-Credential, X-Amz-Signature, X-Amz-Date, X-Amz-SignedHeaders, and X-Amz-Expires parameters.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrInvalidQuerySignatureAlgo: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "X-Amz-Algorithm only supports \"AWS4-HMAC-SHA256\".",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrExpiredPresignRequest: {
+		Code:           "AccessDenied",
+		Description:    "Request has expired",
+		HTTPStatusCode: http.StatusForbidden,
+	},
+	ErrMalformedExpires: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "X-Amz-Expires should be a number",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrNegativeExpires: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "X-Amz-Expires must be non-negative",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMaximumExpires: {
+		Code:           "AuthorizationQueryParametersError",
+		Description:    "X-Amz-Expires must be less than a week (in seconds); that is, the given X-Amz-Expires must be less than 604800 seconds",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+
+	ErrInvalidAccessKeyID: {
+		Code:           "InvalidAccessKeyId",
+		Description:    "The access key ID you provided does not exist in our records.",
+		HTTPStatusCode: http.StatusForbidden,
+	},
+
+	ErrRequestNotReadyYet: {
+		Code:           "AccessDenied",
+		Description:    "Request is not valid yet",
+		HTTPStatusCode: http.StatusForbidden,
+	},
+
+	ErrSignatureDoesNotMatch: {
+		Code:           "SignatureDoesNotMatch",
+		Description:    "The request signature we calculated does not match the signature you provided. Check your key and signing method.",
+		HTTPStatusCode: http.StatusForbidden,
+	},
+
+	ErrContentSHA256Mismatch: {
+		Code:           "XAmzContentSHA256Mismatch",
+		Description:    "The provided 'x-amz-content-sha256' header does not match what was computed.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrMissingDateHeader: {
+		Code:           "AccessDenied",
+		Description:    "AWS authentication requires a valid Date or x-amz-date header",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
+	ErrInvalidRequest: {
+		Code:           "InvalidRequest",
+		Description:    "Invalid Request",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
 	ErrNotImplemented: {
 		Code:           "NotImplemented",
 		Description:    "A header you provided implies functionality that is not implemented",
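Handlers resolve one of these codes through this table before replying; a minimal illustrative lookup (names exactly as defined above, the response-writing helper itself lives elsewhere in this package) would be:

	apiErr := errorCodeResponse[ErrSignatureDoesNotMatch]
	// apiErr.Code           == "SignatureDoesNotMatch"
	// apiErr.HTTPStatusCode == http.StatusForbidden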
@@ -42,8 +42,13 @@ func (s3a *S3ApiServer) PutObjectHandler(w http.ResponseWriter, r *http.Request)

 	rAuthType := getRequestAuthType(r)
 	dataReader := r.Body
+	var s3ErrCode ErrorCode
 	if rAuthType == authTypeStreamingSigned {
-		dataReader = newSignV4ChunkedReader(r)
+		dataReader, s3ErrCode = s3a.iam.newSignV4ChunkedReader(r)
+	}
+	if s3ErrCode != ErrNone {
+		writeErrorResponse(w, s3ErrCode, r.URL)
+		return
 	}
 	defer dataReader.Close()
@@ -3,13 +3,14 @@ package s3api

 import (
 	"context"
 	"fmt"
-	"github.com/aws/aws-sdk-go/aws"
-	"github.com/aws/aws-sdk-go/service/s3"
-	"github.com/gorilla/mux"
 	"net/http"
 	"net/url"
 	"strconv"
 	"strings"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/service/s3"
+	"github.com/gorilla/mux"
 )

 const (

@@ -195,9 +196,14 @@ func (s3a *S3ApiServer) PutObjectPartHandler(w http.ResponseWriter, r *http.Requ
 		return
 	}

+	var s3ErrCode ErrorCode
 	dataReader := r.Body
 	if rAuthType == authTypeStreamingSigned {
-		dataReader = newSignV4ChunkedReader(r)
+		dataReader, s3ErrCode = s3a.iam.newSignV4ChunkedReader(r)
+	}
+	if s3ErrCode != ErrNone {
+		writeErrorResponse(w, s3ErrCode, r.URL)
+		return
 	}
 	defer dataReader.Close()
@@ -1,14 +1,16 @@
 package s3api

 import (
+	"net/http"
+
 	"github.com/gorilla/mux"
 	"google.golang.org/grpc"
-	"net/http"
 )

 type S3ApiServerOption struct {
 	Filer            string
 	FilerGrpcAddress string
+	Config           string
 	DomainName       string
 	BucketsPath      string
 	GrpcDialOption   grpc.DialOption

@@ -16,11 +18,13 @@ type S3ApiServerOption struct {

 type S3ApiServer struct {
 	option *S3ApiServerOption
+	iam    *IdentityAccessManagement
 }

 func NewS3ApiServer(router *mux.Router, option *S3ApiServerOption) (s3ApiServer *S3ApiServer, err error) {
 	s3ApiServer = &S3ApiServer{
 		option: option,
+		iam:    NewIdentityAccessManagement(option.Config, option.DomainName),
 	}

 	s3ApiServer.registerRouter(router)

@@ -40,46 +44,46 @@ func (s3a *S3ApiServer) registerRouter(router *mux.Router) {
 	for _, bucket := range routers {

 		// HeadObject
-		bucket.Methods("HEAD").Path("/{object:.+}").HandlerFunc(s3a.HeadObjectHandler)
+		bucket.Methods("HEAD").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.HeadObjectHandler, ACTION_READ))
 		// HeadBucket
-		bucket.Methods("HEAD").HandlerFunc(s3a.HeadBucketHandler)
+		bucket.Methods("HEAD").HandlerFunc(s3a.iam.Auth(s3a.HeadBucketHandler, ACTION_ADMIN))

 		// CopyObjectPart
-		bucket.Methods("PUT").Path("/{object:.+}").HeadersRegexp("X-Amz-Copy-Source", ".*?(\\/|%2F).*?").HandlerFunc(s3a.CopyObjectPartHandler).Queries("partNumber", "{partNumber:[0-9]+}", "uploadId", "{uploadId:.*}")
+		bucket.Methods("PUT").Path("/{object:.+}").HeadersRegexp("X-Amz-Copy-Source", ".*?(\\/|%2F).*?").HandlerFunc(s3a.iam.Auth(s3a.CopyObjectPartHandler, ACTION_WRITE)).Queries("partNumber", "{partNumber:[0-9]+}", "uploadId", "{uploadId:.*}")
 		// PutObjectPart
-		bucket.Methods("PUT").Path("/{object:.+}").HandlerFunc(s3a.PutObjectPartHandler).Queries("partNumber", "{partNumber:[0-9]+}", "uploadId", "{uploadId:.*}")
+		bucket.Methods("PUT").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.PutObjectPartHandler, ACTION_WRITE)).Queries("partNumber", "{partNumber:[0-9]+}", "uploadId", "{uploadId:.*}")
 		// CompleteMultipartUpload
-		bucket.Methods("POST").Path("/{object:.+}").HandlerFunc(s3a.CompleteMultipartUploadHandler).Queries("uploadId", "{uploadId:.*}")
+		bucket.Methods("POST").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.CompleteMultipartUploadHandler, ACTION_WRITE)).Queries("uploadId", "{uploadId:.*}")
 		// NewMultipartUpload
-		bucket.Methods("POST").Path("/{object:.+}").HandlerFunc(s3a.NewMultipartUploadHandler).Queries("uploads", "")
+		bucket.Methods("POST").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.NewMultipartUploadHandler, ACTION_WRITE)).Queries("uploads", "")
 		// AbortMultipartUpload
-		bucket.Methods("DELETE").Path("/{object:.+}").HandlerFunc(s3a.AbortMultipartUploadHandler).Queries("uploadId", "{uploadId:.*}")
+		bucket.Methods("DELETE").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.AbortMultipartUploadHandler, ACTION_WRITE)).Queries("uploadId", "{uploadId:.*}")
 		// ListObjectParts
-		bucket.Methods("GET").Path("/{object:.+}").HandlerFunc(s3a.ListObjectPartsHandler).Queries("uploadId", "{uploadId:.*}")
+		bucket.Methods("GET").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.ListObjectPartsHandler, ACTION_WRITE)).Queries("uploadId", "{uploadId:.*}")
 		// ListMultipartUploads
-		bucket.Methods("GET").HandlerFunc(s3a.ListMultipartUploadsHandler).Queries("uploads", "")
+		bucket.Methods("GET").HandlerFunc(s3a.iam.Auth(s3a.ListMultipartUploadsHandler, ACTION_WRITE)).Queries("uploads", "")

 		// CopyObject
-		bucket.Methods("PUT").Path("/{object:.+}").HeadersRegexp("X-Amz-Copy-Source", ".*?(\\/|%2F).*?").HandlerFunc(s3a.CopyObjectHandler)
+		bucket.Methods("PUT").Path("/{object:.+}").HeadersRegexp("X-Amz-Copy-Source", ".*?(\\/|%2F).*?").HandlerFunc(s3a.iam.Auth(s3a.CopyObjectHandler, ACTION_WRITE))
 		// PutObject
-		bucket.Methods("PUT").Path("/{object:.+}").HandlerFunc(s3a.PutObjectHandler)
+		bucket.Methods("PUT").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.PutObjectHandler, ACTION_WRITE))
 		// PutBucket
-		bucket.Methods("PUT").HandlerFunc(s3a.PutBucketHandler)
+		bucket.Methods("PUT").HandlerFunc(s3a.iam.Auth(s3a.PutBucketHandler, ACTION_ADMIN))

 		// DeleteObject
-		bucket.Methods("DELETE").Path("/{object:.+}").HandlerFunc(s3a.DeleteObjectHandler)
+		bucket.Methods("DELETE").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.DeleteObjectHandler, ACTION_WRITE))
 		// DeleteBucket
-		bucket.Methods("DELETE").HandlerFunc(s3a.DeleteBucketHandler)
+		bucket.Methods("DELETE").HandlerFunc(s3a.iam.Auth(s3a.DeleteBucketHandler, ACTION_WRITE))

 		// ListObjectsV2
-		bucket.Methods("GET").HandlerFunc(s3a.ListObjectsV2Handler).Queries("list-type", "2")
+		bucket.Methods("GET").HandlerFunc(s3a.iam.Auth(s3a.ListObjectsV2Handler, ACTION_READ)).Queries("list-type", "2")
 		// GetObject, but directory listing is not supported
-		bucket.Methods("GET").Path("/{object:.+}").HandlerFunc(s3a.GetObjectHandler)
+		bucket.Methods("GET").Path("/{object:.+}").HandlerFunc(s3a.iam.Auth(s3a.GetObjectHandler, ACTION_READ))
 		// ListObjectsV1 (Legacy)
-		bucket.Methods("GET").HandlerFunc(s3a.ListObjectsV1Handler)
+		bucket.Methods("GET").HandlerFunc(s3a.iam.Auth(s3a.ListObjectsV1Handler, ACTION_READ))

 		// DeleteMultipleObjects
-		bucket.Methods("POST").HandlerFunc(s3a.DeleteMultipleObjectsHandler).Queries("delete", "")
+		bucket.Methods("POST").HandlerFunc(s3a.iam.Auth(s3a.DeleteMultipleObjectsHandler, ACTION_WRITE)).Queries("delete", "")
 		/*

 		// not implemented

@@ -102,7 +106,7 @@ func (s3a *S3ApiServer) registerRouter(router *mux.Router) {
 	}

 	// ListBuckets
-	apiRouter.Methods("GET").Path("/").HandlerFunc(s3a.ListBucketsHandler)
+	apiRouter.Methods("GET").Path("/").HandlerFunc(s3a.iam.Auth(s3a.ListBucketsHandler, ACTION_ADMIN))

 	// NotFound
 	apiRouter.NotFoundHandler = http.HandlerFunc(notFoundHandler)
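With every route now wrapped in iam.Auth, a client must present one of the configured credentials. A hypothetical smoke test against a locally running gateway (the endpoint, bucket name, and file are assumptions, not part of this commit) could look like:

	aws configure set aws_access_key_id some_access_key1
	aws configure set aws_secret_access_key some_secret_key1
	aws --endpoint-url http://127.0.0.1:8333 s3 mb s3://test-bucket
	aws --endpoint-url http://127.0.0.1:8333 s3 cp ./hello.txt s3://test-bucket/

A key listed with only the Read action should be able to run the copy in the other direction but get an access-denied error on the upload, matching the actions enforced in the router above.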