
Commit

Merge branch 'master' into feat/upgrade
vlastahajek committed Sep 28, 2020
2 parents 35deb59 + bde1209 commit 34ba3de
Showing 22 changed files with 394 additions and 112 deletions.
5 changes: 0 additions & 5 deletions .circleci/config.yml
@@ -472,11 +472,6 @@ jobs:
- install_rust_compiler
- install_release_tools
- run: make protoc # installs protoc
- run:
name: import GPG key
command: |
echo -e "$GPG_KEY" > private.key
gpg --batch --import private.key
- run:
name: "Build release"
command: make release
7 changes: 0 additions & 7 deletions .goreleaser.yml
@@ -50,13 +50,6 @@ builds:
hooks:
pre: make generate

signs:
-
signature: "${artifact}.asc"
cmd: gpg
args: ["--armor", "--detach-sign", "${artifact}"]
artifacts: all

nfpms:
-
id: "influxdata"
11 changes: 7 additions & 4 deletions CHANGELOG.md
@@ -7,21 +7,22 @@ release includes significant breaking changes.

**Upgrading from previous beta builds of `influxd` is not supported**

In order to continue using `influxd` betas, users will be required to move all existing
data out of their `~/.influxdbv2` (or equivalent) path, including `influxd.bolt`. This
means all existing dashboards, tasks, integrations, alerts, users and tokens will need to
be recreated. The `influx export all` command may be used to export and re-import most
of this data.

At this time, there is no tooling to convert existing time series data from previous
beta releases. If data from a prior beta release is found, `influxd` will refuse to start.

We have also changed the default port of InfluxDB from 9999 back to 8086. If you still would like
to run on port 9999, you can start influxd with the `--http-bind-address` option. You will also
need to update any InfluxDB CLI config profiles with the new port number.

1. [19446](https://github.com/influxdata/influxdb/pull/19446): Port TSM1 storage engine
1. [19494](https://github.com/influxdata/influxdb/pull/19494): Changing the default port from 9999 to 8086
1. [19636](https://github.com/influxdata/influxdb/pull/19636): Disable unimplemented delete with predicate API

### Features

@@ -35,13 +36,15 @@ need to update any InfluxDB CLI config profiles with the new port number.
1. [19506](https://github.com/influxdata/influxdb/pull/19506): Add TSM 1.x storage options as flags
1. [19508](https://github.com/influxdata/influxdb/pull/19508): Add subset of InfluxQL coordinator options as flags
1. [19457](https://github.com/influxdata/influxdb/pull/19457): Add ability to export resources by name via the CLI
1. [19640](https://github.com/influxdata/influxdb/pull/19640): Turn on Community Templates

### Bug Fixes

1. [19331](https://github.com/influxdata/influxdb/pull/19331): Add description to auth influx command outputs.
1. [19392](https://github.com/influxdata/influxdb/pull/19392): Include the edge of the boundary we are observing.
1. [19453](https://github.com/influxdata/influxdb/pull/19453): Warn about duplicate tag names during influx write csv.
1. [19466](https://github.com/influxdata/influxdb/pull/19466): Do not override existing line part in group annotation.
1. [19637](https://github.com/influxdata/influxdb/pull/19637): Added PATCH to the list of allowed methods

## v2.0.0-beta.16 [2020-08-07]

1 change: 0 additions & 1 deletion cmd/influxd/launcher/engine.go
@@ -120,7 +120,6 @@ func (t *TemporaryEngine) SeriesCardinality(orgID, bucketID influxdb.ID) int64 {
// DeleteBucketRangePredicate will delete a bucket from the range and predicate.
func (t *TemporaryEngine) DeleteBucketRangePredicate(ctx context.Context, orgID, bucketID influxdb.ID, min, max int64, pred influxdb.Predicate) error {
return t.engine.DeleteBucketRangePredicate(ctx, orgID, bucketID, min, max, pred)

}

func (t *TemporaryEngine) CreateBucket(ctx context.Context, b *influxdb.Bucket) error {
1 change: 1 addition & 0 deletions errors.go
@@ -14,6 +14,7 @@ import (
// Any time this set of constants changes, you must also update the swagger for Error.properties.code.enum.
const (
EInternal = "internal error"
ENotImplemented = "not implemented"
ENotFound = "not found"
EConflict = "conflict" // action cannot be performed
EInvalid = "invalid" // validation failed
4 changes: 2 additions & 2 deletions flags.yml
@@ -31,9 +31,9 @@
- name: Community Templates
description: Replace current template uploading functionality with community driven templates
key: communityTemplates
default: false
default: true
expose: true
contact: Bucky, Johnny Steenbergen (Berg)
contact: Bucky
lifetime: permanent

- name: Frontend Example
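With the default flipped to `true`, community templates are on unless a deployment opts out; the generated `kit/feature/list.go` further down (not rendered) exposes this flag to Go code. The sketch below shows the general shape of such a boolean feature flag with a per-context override. It is a hypothetical stand-in for illustration only, not the actual `kit/feature` API.

```go
package main

import (
	"context"
	"fmt"
)

// boolFlag is a hypothetical stand-in for a generated feature flag: a key,
// a default value, and an optional per-context override.
type boolFlag struct {
	key      string
	fallback bool
}

type ctxKey string

// Enabled returns the override stored in the context, if any; otherwise the
// flag's default.
func (f boolFlag) Enabled(ctx context.Context) bool {
	if v, ok := ctx.Value(ctxKey(f.key)).(bool); ok {
		return v
	}
	return f.fallback
}

// communityTemplates mirrors the flags.yml entry above: key
// "communityTemplates", default now true.
var communityTemplates = boolFlag{key: "communityTemplates", fallback: true}

func main() {
	ctx := context.Background()
	fmt.Println(communityTemplates.Enabled(ctx)) // true by default

	// A caller can still switch the feature off for a single request.
	off := context.WithValue(ctx, ctxKey("communityTemplates"), false)
	fmt.Println(communityTemplates.Enabled(off)) // false
}
```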
20 changes: 6 additions & 14 deletions http/delete_handler.go
@@ -114,24 +114,16 @@ func (h *DeleteHandler) handleDelete(w http.ResponseWriter, r *http.Request) {
return
}

// send delete points request to storage
err = h.DeleteService.DeleteBucketRangePredicate(ctx,
dr.Org.ID,
dr.Bucket.ID,
dr.Start,
dr.Stop,
dr.Predicate,
)
if err != nil {
h.HandleHTTPError(ctx, err, w)
return
}
h.HandleHTTPError(r.Context(), &influxdb.Error{
Code: influxdb.ENotImplemented,
Op: "http/handleDelete",
Msg: "Not implemented",
}, w)

h.log.Debug("Deleted",
zap.String("orgID", fmt.Sprint(dr.Org.ID.String())),
zap.String("buketID", fmt.Sprint(dr.Bucket.ID.String())),
)

w.WriteHeader(http.StatusNoContent)
}

func decodeDeleteRequest(ctx context.Context, r *http.Request, orgSvc influxdb.OrganizationService, bucketSvc influxdb.BucketService) (*deleteRequest, error) {
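With this change the endpoint stops deleting anything: every delete-with-predicate request is answered with an `influxdb.ENotImplemented` error, which the status-code mapping added in `kit/transport/http/error_handler.go` below turns into HTTP 501. The following self-contained sketch illustrates that pattern with hypothetical names (`apiError`, `codeToStatus`, `writeError`); it is not the real `influxdb.Error` type or `HandleHTTPError` helper.

```go
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

// apiError is a hypothetical stand-in for influxdb.Error: a machine-readable
// code plus the failing operation and a human-readable message.
type apiError struct {
	Code string `json:"code"`
	Op   string `json:"op"`
	Msg  string `json:"message"`
}

// codeToStatus maps error codes to HTTP status codes, in the spirit of the
// influxDBErrorToStatusCode table shown further down; unknown codes fall
// back to 500.
var codeToStatus = map[string]int{
	"not implemented": http.StatusNotImplemented,
	"not found":       http.StatusNotFound,
	"invalid":         http.StatusBadRequest,
}

// writeError renders the error as JSON with the mapped status code.
func writeError(w http.ResponseWriter, e *apiError) {
	status, ok := codeToStatus[e.Code]
	if !ok {
		status = http.StatusInternalServerError
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.WriteHeader(status)
	json.NewEncoder(w).Encode(e)
}

// handleDelete rejects every request: delete with predicate is not wired up.
func handleDelete(w http.ResponseWriter, r *http.Request) {
	writeError(w, &apiError{
		Code: "not implemented",
		Op:   "http/handleDelete",
		Msg:  "Not implemented",
	})
}

func main() {
	http.HandleFunc("/api/v2/delete", handleDelete)
	log.Fatal(http.ListenAndServe(":8086", nil))
}
```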
4 changes: 2 additions & 2 deletions http/delete_test.go
@@ -226,7 +226,7 @@ func TestDelete(t *testing.T) {
},
},
wants: wants{
statusCode: http.StatusNoContent,
statusCode: http.StatusNotImplemented,
body: ``,
},
},
@@ -331,7 +331,7 @@
},
},
wants: wants{
statusCode: http.StatusNoContent,
statusCode: http.StatusNotImplemented,
body: ``,
},
},
4 changes: 2 additions & 2 deletions kit/feature/list.go

Generated file; diff not rendered by default.

1 change: 1 addition & 0 deletions kit/transport/http/error_handler.go
@@ -78,6 +78,7 @@ func ErrorCodeToStatusCode(ctx context.Context, code string) int {
// influxDBErrorToStatusCode is a mapping of ErrorCode to http status code.
var influxDBErrorToStatusCode = map[string]int{
influxdb.EInternal: http.StatusInternalServerError,
influxdb.ENotImplemented: http.StatusNotImplemented,
influxdb.EInvalid: http.StatusBadRequest,
influxdb.EUnprocessableEntity: http.StatusUnprocessableEntity,
influxdb.EEmptyValue: http.StatusBadRequest,
2 changes: 1 addition & 1 deletion kit/transport/http/middleware.go
@@ -26,7 +26,7 @@ func SetCORS(next http.Handler) http.Handler {
}
if r.Method == http.MethodOptions {
// allow and stop processing in pre-flight requests
w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH")
w.Header().Set("Access-Control-Allow-Headers", "Accept, Content-Type, Content-Length, Accept-Encoding, Authorization, User-Agent")
w.WriteHeader(http.StatusNoContent)
return
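The only functional change here is that pre-flight responses now advertise `PATCH` along with the other verbs. For context, a minimal CORS middleware in the same spirit could look like the sketch below; it illustrates the pattern rather than reproducing the project's `SetCORS` implementation verbatim.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

// setCORS answers pre-flight OPTIONS requests itself and passes everything
// else to the wrapped handler, advertising PATCH among the allowed methods.
func setCORS(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if origin := r.Header.Get("Origin"); origin != "" {
			w.Header().Set("Access-Control-Allow-Origin", origin)
		}
		if r.Method == http.MethodOptions {
			// allow and stop processing in pre-flight requests
			w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH")
			w.Header().Set("Access-Control-Allow-Headers", "Accept, Content-Type, Content-Length, Accept-Encoding, Authorization, User-Agent")
			w.WriteHeader(http.StatusNoContent)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})
	log.Fatal(http.ListenAndServe(":8086", setCORS(mux)))
}
```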
11 changes: 8 additions & 3 deletions pkg/csv2lp/csv2lp.go
@@ -38,6 +38,8 @@ func CreateRowColumnError(line int, columnLabel string, err error) CsvLineError
type CsvToLineReader struct {
// csv reading
csv *csv.Reader
// lineReader is used to report line number of the last read CSV line
lineReader *LineReader
// Table collects information about used columns
Table CsvTable
// LineNumber represents line number of csv.Reader, 1 is the first
@@ -100,8 +102,8 @@ func (state *CsvToLineReader) Read(p []byte) (n int, err error) {
// state3: fill buffer with data to read from
for {
// Read each record from csv
state.LineNumber++
row, err := state.csv.Read()
state.LineNumber = state.lineReader.LastLineNumber
if parseError, ok := err.(*csv.ParseError); ok && parseError.Err == csv.ErrFieldCount {
// every row can have different number of columns
err = nil
@@ -150,9 +152,12 @@ func (state *CsvToLineReader) Read(p []byte) (n int, err error) {

// CsvToLineProtocol transforms csv data into line protocol data
func CsvToLineProtocol(reader io.Reader) *CsvToLineReader {
csv := csv.NewReader(reader)
lineReader := NewLineReader(reader)
lineReader.LineNumber = 1 // start counting from 1
csv := csv.NewReader(lineReader)
csv.ReuseRecord = true
return &CsvToLineReader{
csv: csv,
csv: csv,
lineReader: lineReader,
}
}
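The point of routing the CSV reader through a `LineReader` is that `LineNumber` now reflects physical lines consumed from the input, including lines swallowed by quoted multi-line fields, instead of the count of CSV records. The `LineReader` itself is not part of this diff; the sketch below shows one way such a newline-counting `io.Reader` wrapper could work, assuming it simply counts `'\n'` bytes handed to its consumer (`lineCountingReader` and its fields are hypothetical names).

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

// lineCountingReader is a hypothetical sketch of a reader wrapper that keeps
// track of how many physical lines its consumer has read so far.
type lineCountingReader struct {
	r              io.Reader
	LastLineNumber int // starting offset plus the number of '\n' bytes seen
}

func (lr *lineCountingReader) Read(p []byte) (int, error) {
	n, err := lr.r.Read(p)
	lr.LastLineNumber += bytes.Count(p[:n], []byte{'\n'})
	return n, err
}

func main() {
	// A quoted CSV field spanning two physical lines still advances the
	// counter, even though encoding/csv reports it as a single record.
	data := "measurement,value\ncpu,\"2\n.1\"\n"
	lr := &lineCountingReader{r: strings.NewReader(data), LastLineNumber: 1}
	if _, err := io.ReadAll(lr); err != nil {
		panic(err)
	}
	fmt.Println(lr.LastLineNumber) // 1 + 3 newlines = 4
}
```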
73 changes: 73 additions & 0 deletions pkg/csv2lp/csv2lp_test.go
@@ -351,3 +351,76 @@ func Test_CsvLineError(t *testing.T) {
require.Equal(t, test.value, test.err.Error())
}
}

// Test_CsvToLineProtocol_lineNumbers tests that correct line numbers are reported
func Test_CsvToLineProtocol_lineNumbers(t *testing.T) {
var buf bytes.Buffer
log.SetOutput(&buf)
oldFlags := log.Flags()
log.SetFlags(0)
defer func() {
log.SetOutput(os.Stderr)
log.SetFlags(oldFlags)
}()

type ActualArguments = struct {
src *CsvToLineReader
err error
row []string
}
type ExpectedArguments = struct {
errorString string
row []string
}

// note: csv contains a field with a newline and extra empty lines
csv := `sep=;
_measurement;a|long:strict
;1
cpu;"2
.1"
cpu;3a
`
calledArgs := []ActualArguments{}
expectedArgs := []ExpectedArguments{
{
"line 4: column '_measurement': no measurement supplied",
[]string{"", "1"},
},
{
"line 6: column 'a': '2\n.1' cannot fit into long data type",
[]string{"cpu", "2\n.1"},
},
{
"line 8: column 'a': strconv.ParseInt:",
[]string{"cpu", "3a"},
},
}

reader := CsvToLineProtocol(strings.NewReader(csv)).SkipRowOnError(true)
reader.RowSkipped = func(src *CsvToLineReader, err error, _row []string) {
// make a copy of _row
row := make([]string, len(_row))
copy(row, _row)
// remember for comparison
calledArgs = append(calledArgs, ActualArguments{
src, err, row,
})
}
// read all the data
ioutil.ReadAll(reader)

out := buf.String()
require.Empty(t, out, "No log messages expected because RowSkipped handler is set")

require.Len(t, calledArgs, 3)
for i, expected := range expectedArgs {
require.Equal(t, reader, calledArgs[i].src)
require.Contains(t, calledArgs[i].err.Error(), expected.errorString)
require.Equal(t, expected.row, calledArgs[i].row)
}
// 8 lines were read
require.Equal(t, 8, reader.LineNumber)
}
12 changes: 8 additions & 4 deletions pkg/csv2lp/examples_test.go
@@ -149,8 +149,7 @@ cpu usage_user=2.7
},
{
"columnSeparator",
`
sep=;
`sep=;
m|measurement;available|boolean:y,Y:|n;dt|dateTime:number
test;nil;1
test;N;2
@@ -168,23 +167,28 @@
},
}

func (example *csvExample) normalize() {
func (example *csvExample) normalize() rune {
for len(example.lp) > 0 && example.lp[0] == '\n' {
example.lp = example.lp[1:]
}
if strings.HasPrefix(example.csv, "sep=") {
return (rune)(example.csv[4])
}
return ','
}

// Test_Examples tests examples of README.md file herein
func Test_Examples(t *testing.T) {
for _, example := range examples {
example.normalize()
t.Run(example.name, func(t *testing.T) {
comma := example.normalize()
transformer := CsvToLineProtocol(strings.NewReader(example.csv))
transformer.SkipRowOnError(true)
result, err := ioutil.ReadAll(transformer)
if err != nil {
require.Nil(t, fmt.Sprintf("%s", err))
}
require.Equal(t, comma, transformer.Comma())
require.Equal(t, example.lp, string(result))
})
}
(Diffs for the remaining changed files were not loaded.)
