2 changes: 1 addition & 1 deletion go.mod
@@ -11,7 +11,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/googleapis/gax-go/v2 v2.12.1
 	github.com/hashicorp/go-multierror v1.1.1
-	github.com/m-mizutani/bqs v0.0.2-0.20240220024557-5c1b9395c874
+	github.com/m-mizutani/bqs v0.0.3
 	github.com/m-mizutani/clog v0.0.4
 	github.com/m-mizutani/goerr v0.1.11
 	github.com/m-mizutani/gt v0.0.8
2 changes: 2 additions & 0 deletions go.sum
@@ -151,6 +151,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/m-mizutani/bqs v0.0.2-0.20240220024557-5c1b9395c874 h1:FAmlPk310af5F5O2Le3Svzz9tTTjVn7+iOjPNGOFHiM=
 github.com/m-mizutani/bqs v0.0.2-0.20240220024557-5c1b9395c874/go.mod h1:SLwcXCE84JPSQA0I2hsE0rCQ3wVoc5XgYrRWdpNoLPw=
+github.com/m-mizutani/bqs v0.0.3 h1:DThn+TGMT9T9aDrMBhyH89QGvXbmJX0T32MI7TeZBVQ=
+github.com/m-mizutani/bqs v0.0.3/go.mod h1:SLwcXCE84JPSQA0I2hsE0rCQ3wVoc5XgYrRWdpNoLPw=
 github.com/m-mizutani/clog v0.0.4 h1:6hY5CzHwNS4zuJhF6puazYPtGeaEEGIbrD4Ccimyaow=
 github.com/m-mizutani/clog v0.0.4/go.mod h1:a2J7BlnXOkaMQ0fNeDBG3IyyyWnCnSKYH8ltHFNDcHE=
 github.com/m-mizutani/goerr v0.1.11 h1:noTEk8jNOVl/ST/Qfn0q7lMA13/ygzyl1PxaD4hHti4=
74 changes: 37 additions & 37 deletions pkg/domain/model/bigquery.go
@@ -7,48 +7,48 @@ import (
 )
 
 type LoadLog struct {
-	ID         types.RequestID
-	StartedAt  time.Time
-	FinishedAt time.Time
-	Success    bool
-	Sources    []*SourceLog
-	Ingests    []*IngestLog
-	Error      string
+	ID         types.RequestID `json:"id" bigquery:"id"`
+	StartedAt  time.Time       `json:"started_at" bigquery:"started_at"`
+	FinishedAt time.Time       `json:"finished_at" bigquery:"finished_at"`
+	Success    bool            `json:"success" bigquery:"success"`
+	Sources    []*SourceLog    `json:"sources" bigquery:"sources"`
+	Ingests    []*IngestLog    `json:"ingests" bigquery:"ingests"`
+	Error      string          `json:"error" bigquery:"error"`
 }
 
 type SourceLog struct {
-	CS *CloudStorageObject
-	Source
-	RowCount   int
-	StartedAt  time.Time
-	FinishedAt time.Time
-	Success    bool
+	CS         *CloudStorageObject `json:"cs" bigquery:"cs"`
+	Source     Source              `json:"source" bigquery:"source"`
+	RowCount   int                 `json:"row_count" bigquery:"row_count"`
+	StartedAt  time.Time           `json:"started_at" bigquery:"started_at"`
+	FinishedAt time.Time           `json:"finished_at" bigquery:"finished_at"`
+	Success    bool                `json:"success" bigquery:"success"`
 }
 
 type IngestLog struct {
-	ID           types.IngestID
-	StartedAt    time.Time
-	FinishedAt   time.Time
-	ObjectSchema types.ObjectSchema
-	DatasetID    types.BQDatasetID
-	TableID      types.BQTableID
-	TableSchema  string
-	LogCount     int
-	Success      bool
+	ID           types.IngestID     `json:"id" bigquery:"id"`
+	StartedAt    time.Time          `json:"started_at" bigquery:"started_at"`
+	FinishedAt   time.Time          `json:"finished_at" bigquery:"finished_at"`
+	ObjectSchema types.ObjectSchema `json:"object_schema" bigquery:"object_schema"`
+	DatasetID    types.BQDatasetID  `json:"dataset_id" bigquery:"dataset_id"`
+	TableID      types.BQTableID    `json:"table_id" bigquery:"table_id"`
+	TableSchema  string             `json:"table_schema" bigquery:"table_schema"`
+	LogCount     int                `json:"log_count" bigquery:"log_count"`
+	Success      bool               `json:"success" bigquery:"success"`
 }
 
 type LoadLogRaw struct {
 	LoadLog
-	StartedAt  int64
-	FinishedAt int64
-	Ingests    []*IngestLogRaw
-	Sources    []*SourceLogRaw
+	StartedAt  int64           `json:"started_at" bigquery:"started_at"`
+	FinishedAt int64           `json:"finished_at" bigquery:"finished_at"`
+	Ingests    []*IngestLogRaw `json:"ingests" bigquery:"ingests"`
+	Sources    []*SourceLogRaw `json:"sources" bigquery:"sources"`
 }
 
 type SourceLogRaw struct {
 	SourceLog
-	StartedAt  int64
-	FinishedAt int64
+	StartedAt  int64 `json:"started_at" bigquery:"started_at"`
+	FinishedAt int64 `json:"finished_at" bigquery:"finished_at"`
 }
 
 func (x *SourceLog) Raw() *SourceLogRaw {
@@ -61,8 +61,8 @@ func (x *SourceLog) Raw() *SourceLogRaw {
 
 type IngestLogRaw struct {
 	IngestLog
-	StartedAt  int64
-	FinishedAt int64
+	StartedAt  int64 `json:"started_at" bigquery:"started_at"`
+	FinishedAt int64 `json:"finished_at" bigquery:"finished_at"`
 }
 
 func (x *IngestLog) Raw() *IngestLogRaw {
@@ -96,11 +96,11 @@ func (x *LoadLog) Raw() *LoadLogRaw {
 
 type LogRecord struct {
 	// NOTICE: Must update LogRecordRaw also when adding new fields to LogRecord
-	ID         types.LogID
-	IngestID   types.IngestID
-	Timestamp  time.Time
-	IngestedAt time.Time
-	Data       any
+	ID         types.LogID    `json:"id" bigquery:"id"`
+	IngestID   types.IngestID `json:"ingest_id" bigquery:"ingest_id"`
+	Timestamp  time.Time      `json:"timestamp" bigquery:"timestamp"`
+	IngestedAt time.Time      `json:"ingested_at" bigquery:"ingested_at"`
+	Data       any            `json:"data" bigquery:"data"`
 }
 
 func (x LogRecord) Raw() *LogRecordRaw {
@@ -114,8 +114,8 @@ func (x LogRecord) Raw() *LogRecordRaw {
 // LogRecordRaw is LogRecord with Timestamp and IngestedAt converted from time.Time to int64. The BigQuery Storage Write API requires converting data to protocol buffers, but adapt.StorageSchemaToProto2Descriptor does not support time.Time, so int64 is used in place of time.Time. LogRecordRaw is therefore used only for insertion via the BigQuery Storage Write API.
 type LogRecordRaw struct {
 	LogRecord
-	Timestamp  int64
-	IngestedAt int64
+	Timestamp  int64 `json:"timestamp" bigquery:"timestamp"`
+	IngestedAt int64 `json:"ingested_at" bigquery:"ingested_at"`
 }
 
 type LogRecordSet map[BigQueryDest][]*LogRecord
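
The Raw() bodies are collapsed in this view, but the comment above explains the pattern: each Raw struct embeds the tagged struct and shadows its time.Time fields with int64 so the Storage Write API's proto descriptor can encode them. A minimal self-contained sketch of the idea (hypothetical stand-in types, not the repository's exact code; the epoch unit, microseconds here, is an assumption based on the Storage Write API's TIMESTAMP representation):

package main

import (
	"fmt"
	"time"
)

// Stand-in for the tagged model type in this PR (illustrative only).
type LogRecord struct {
	Timestamp  time.Time `json:"timestamp" bigquery:"timestamp"`
	IngestedAt time.Time `json:"ingested_at" bigquery:"ingested_at"`
}

// LogRecordRaw shadows the embedded time.Time fields with int64;
// the outer fields win during field promotion.
type LogRecordRaw struct {
	LogRecord
	Timestamp  int64 `json:"timestamp" bigquery:"timestamp"`
	IngestedAt int64 `json:"ingested_at" bigquery:"ingested_at"`
}

// Raw flattens the timestamps; UnixMicro is assumed here, not confirmed
// by the diff — check what the proto descriptor actually expects.
func (x LogRecord) Raw() *LogRecordRaw {
	return &LogRecordRaw{
		LogRecord:  x,
		Timestamp:  x.Timestamp.UnixMicro(),
		IngestedAt: x.IngestedAt.UnixMicro(),
	}
}

func main() {
	rec := LogRecord{Timestamp: time.Now(), IngestedAt: time.Now()}
	fmt.Println(rec.Raw().Timestamp) // int64, ready for proto encoding
}
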
6 changes: 3 additions & 3 deletions pkg/domain/model/policy.go
@@ -24,9 +24,9 @@ type EventPolicyOutput struct {
 
 type Source struct {
 	// Source object information
-	Parser   types.ObjectParser   `json:"parser"`
-	Schema   types.ObjectSchema   `json:"schema"`
-	Compress types.ObjectCompress `json:"compress"`
+	Parser   types.ObjectParser   `json:"parser" bigquery:"parser"`
+	Schema   types.ObjectSchema   `json:"schema" bigquery:"schema"`
+	Compress types.ObjectCompress `json:"compress" bigquery:"compress"`
 }
 
 func (x Source) Validate() error {
18 changes: 9 additions & 9 deletions pkg/domain/model/usecase.go
@@ -17,23 +17,23 @@ type LoadRequest struct {
 }
 
 type Object struct {
-	CS        *CloudStorageObject `json:"cs,omitempty"`
-	Size      *int64              `json:"size,omitempty"`
-	CreatedAt *int64              `json:"created_at"`
-	Digests   []Digest            `json:"digests"`
+	CS        *CloudStorageObject `json:"cs,omitempty" bigquery:"cs"`
+	Size      *int64              `json:"size,omitempty" bigquery:"size"`
+	CreatedAt *int64              `json:"created_at" bigquery:"created_at"`
+	Digests   []Digest            `json:"digests" bigquery:"digests"`
 
 	// Data is original notification data, such as CloudStorageEvent
-	Data any `json:"data"`
+	Data any `json:"data" bigquery:"-"`
 }
 
 type CloudStorageObject struct {
-	Bucket types.CSBucket   `json:"bucket"`
-	Name   types.CSObjectID `json:"name"`
+	Bucket types.CSBucket   `json:"bucket" bigquery:"bucket"`
+	Name   types.CSObjectID `json:"name" bigquery:"name"`
 }
 
 type Digest struct {
-	Alg   string `json:"alg"`
-	Value string `json:"value"`
+	Alg   string `json:"alg" bigquery:"alg"`
+	Value string `json:"value" bigquery:"value"`
 }
 
 func NewObjectFromCloudStorageAttrs(attrs *storage.ObjectAttrs) Object {
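
Worth noting in Object: Data keeps its json tag but gets bigquery:"-", so the raw notification payload still round-trips through JSON notifications while staying out of the inferred BigQuery schema. A small sketch of the expected effect, assuming (as this PR implies) that bqs derives column names from bigquery tags and treats "-" as an exclusion:

package main

import (
	"fmt"

	"github.com/m-mizutani/bqs"
)

// Hypothetical struct mirroring the tagging scheme used in this PR.
type object struct {
	Bucket string `json:"bucket" bigquery:"bucket"`
	Name   string `json:"name" bigquery:"name"`
	Data   any    `json:"data" bigquery:"-"` // excluded from the BQ schema
}

func main() {
	schema, err := bqs.Infer(&object{})
	if err != nil {
		panic(err)
	}
	for _, f := range schema {
		fmt.Println(f.Name, f.Type) // expected: bucket STRING, name STRING — no "data"
	}
}
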
9 changes: 7 additions & 2 deletions pkg/usecase/bigquery.go
@@ -59,7 +59,12 @@ func inferSchema[T any](data []T) (bigquery.Schema, error) {
 
 func setupLoadLogTable(ctx context.Context, bq interfaces.BigQuery, meta *model.MetadataConfig) (bigquery.Schema, error) {
 	schema, err := bqs.Infer(&model.LoadLog{
-		Sources: []*model.SourceLog{{}},
+		Sources: []*model.SourceLog{
+			{
+				CS:     &model.CloudStorageObject{},
+				Source: model.Source{},
+			},
+		},
 		Ingests: []*model.IngestLog{{}},
 	})
 	if err != nil {
@@ -68,7 +73,7 @@ func setupLoadLogTable(ctx context.Context, bq interfaces.BigQuery, meta *model.MetadataConfig) (bigquery.Schema, error) {
 	md := &bigquery.TableMetadata{
 		Schema: schema,
 		TimePartitioning: &bigquery.TimePartitioning{
-			Field: "StartedAt",
+			Field: "started_at",
 			Type: bigquery.MonthPartitioningType,
 		},
 	}
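
The partition-field rename is the visible consequence of the new tags: once bqs emits column names from the bigquery tags, the table's top-level column is started_at rather than the Go field name StartedAt, and TimePartitioning.Field must reference a column that actually exists in the schema. A quick local sanity check, under the same assumption about tag-driven naming (loadLog is a hypothetical mini version of model.LoadLog):

package main

import (
	"fmt"

	"cloud.google.com/go/bigquery"
	"github.com/m-mizutani/bqs"
)

type loadLog struct {
	StartedAt int64 `json:"started_at" bigquery:"started_at"`
	Success   bool  `json:"success" bigquery:"success"`
}

func main() {
	schema, err := bqs.Infer(&loadLog{})
	if err != nil {
		panic(err)
	}
	// Verify the partition field matches a real column before creating the table.
	const partitionField = "started_at"
	found := false
	for _, f := range schema {
		if f.Name == partitionField {
			found = true
		}
	}
	fmt.Println("partition field present:", found)
	_ = bigquery.TimePartitioning{Field: partitionField, Type: bigquery.MonthPartitioningType}
}
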