new IP allocator and add postgres to integration tests. (#1756)
This commit is contained in:
parent f581d4d9c0
commit 384ca03208
119 changed files with 3686 additions and 443 deletions
@@ -72,9 +72,9 @@ database:
 sqlite.path: /tmp/integration_test_db.sqlite3
 ephemeral_node_inactivity_timeout: 30m
 node_update_check_interval: 10s
-ip_prefixes:
-  - fd7a:115c:a1e0::/48
-  - 100.64.0.0/10
+prefixes:
+  v6: fd7a:115c:a1e0::/48
+  v4: 100.64.0.0/10
 dns_config:
   base_domain: headscale.net
   magic_dns: true
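The renamed keys split the old mixed ip_prefixes list into one prefix per address family. As a minimal illustration of what that pair looks like in Go (this is not headscale's allocator code; the helper name is made up), both values parse cleanly with the standard library's net/netip:

package main

import (
	"fmt"
	"net/netip"
)

// parsePrefixPair is a hypothetical helper: it only validates that the
// configured v4/v6 values are well-formed prefixes of the right family.
func parsePrefixPair(v4, v6 string) (netip.Prefix, netip.Prefix, error) {
	p4, err := netip.ParsePrefix(v4)
	if err != nil {
		return netip.Prefix{}, netip.Prefix{}, fmt.Errorf("parsing v4 prefix: %w", err)
	}
	p6, err := netip.ParsePrefix(v6)
	if err != nil {
		return netip.Prefix{}, netip.Prefix{}, fmt.Errorf("parsing v6 prefix: %w", err)
	}
	if !p4.Addr().Is4() || !p6.Addr().Is6() {
		return netip.Prefix{}, netip.Prefix{}, fmt.Errorf("%s / %s is not a v4/v6 pair", p4, p6)
	}
	return p4, p6, nil
}

func main() {
	p4, p6, err := parsePrefixPair("100.64.0.0/10", "fd7a:115c:a1e0::/48")
	if err != nil {
		panic(err)
	}
	fmt.Println(p4, p6) // 100.64.0.0/10 fd7a:115c:a1e0::/48
}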
@@ -115,7 +115,8 @@ func DefaultConfigEnv() map[string]string {
 		"HEADSCALE_DATABASE_SQLITE_PATH":              "/tmp/integration_test_db.sqlite3",
 		"HEADSCALE_EPHEMERAL_NODE_INACTIVITY_TIMEOUT": "30m",
 		"HEADSCALE_NODE_UPDATE_CHECK_INTERVAL":        "10s",
-		"HEADSCALE_IP_PREFIXES":                       "fd7a:115c:a1e0::/48 100.64.0.0/10",
+		"HEADSCALE_PREFIXES_V4":                       "100.64.0.0/10",
+		"HEADSCALE_PREFIXES_V6":                       "fd7a:115c:a1e0::/48",
 		"HEADSCALE_DNS_CONFIG_BASE_DOMAIN":            "headscale.net",
 		"HEADSCALE_DNS_CONFIG_MAGIC_DNS":              "true",
 		"HEADSCALE_DNS_CONFIG_DOMAINS":                "",
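DefaultConfigEnv returns a map, while dockertest's RunOptions.Env (used further down) takes a flat slice of KEY=VALUE strings, so the map has to be flattened somewhere. A hypothetical conversion helper, shown only to make that relationship explicit, not code from this commit:

import (
	"fmt"
	"sort"
)

// envSlice is an assumed helper: it flattens a map like DefaultConfigEnv's
// result into the KEY=VALUE form that dockertest expects.
func envSlice(env map[string]string) []string {
	out := make([]string, 0, len(env))
	for k, v := range env {
		out = append(out, fmt.Sprintf("%s=%s", k, v))
	}
	sort.Strings(out) // map iteration order is random; keep the container env deterministic
	return out
}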
@@ -56,6 +56,8 @@ type HeadscaleInContainer struct {
 	container *dockertest.Resource
 	network   *dockertest.Network
 
+	pgContainer *dockertest.Resource
+
 	// optional config
 	port       int
 	extraPorts []string
@@ -65,6 +67,7 @@ type HeadscaleInContainer struct {
 	tlsCert          []byte
 	tlsKey           []byte
 	filesInContainer []fileInContainer
+	postgres         bool
 }
 
 // Option represent optional settings that can be given to a
@@ -162,6 +165,14 @@ func WithFileInContainer(path string, contents []byte) Option {
 	}
 }
 
+// WithPostgres spins up a Postgres container and
+// sets it as the main database.
+func WithPostgres() Option {
+	return func(hsic *HeadscaleInContainer) {
+		hsic.postgres = true
+	}
+}
+
 // New returns a new HeadscaleInContainer instance.
 func New(
 	pool *dockertest.Pool,
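A usage sketch for the new option. The surrounding call shape (New taking a pool, a network, and variadic Options) is an assumption based on the hunks above; only hsic.WithPostgres() itself is introduced by this commit:

// Hypothetical test setup; everything except WithPostgres is assumed.
hs, err := hsic.New(
	pool,
	network,
	hsic.WithPostgres(), // back headscale with a Postgres container instead of SQLite
)
if err != nil {
	t.Fatalf("starting headscale container: %v", err)
}
defer hs.Shutdown() // also purges the Postgres container, see the Shutdown hunk below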
@@ -209,6 +220,33 @@ func New(
 		ContextDir: dockerContextPath,
 	}
 
+	if hsic.postgres {
+		hsic.env["HEADSCALE_DATABASE_TYPE"] = "postgres"
+		hsic.env["HEADSCALE_DATABASE_POSTGRES_HOST"] = fmt.Sprintf("postgres-%s", hash)
+		hsic.env["HEADSCALE_DATABASE_POSTGRES_USER"] = "headscale"
+		hsic.env["HEADSCALE_DATABASE_POSTGRES_PASS"] = "headscale"
+		hsic.env["HEADSCALE_DATABASE_POSTGRES_NAME"] = "headscale"
+		delete(hsic.env, "HEADSCALE_DATABASE_SQLITE_PATH")
+
+		pg, err := pool.RunWithOptions(
+			&dockertest.RunOptions{
+				Name:       fmt.Sprintf("postgres-%s", hash),
+				Repository: "postgres",
+				Tag:        "latest",
+				Networks:   []*dockertest.Network{network},
+				Env: []string{
+					"POSTGRES_USER=headscale",
+					"POSTGRES_PASSWORD=headscale",
+					"POSTGRES_DB=headscale",
+				},
+			})
+		if err != nil {
+			return nil, fmt.Errorf("starting postgres container: %w", err)
+		}
+
+		hsic.pgContainer = pg
+	}
+
 	env := []string{
 		"HEADSCALE_PROFILING_ENABLED=1",
 		"HEADSCALE_PROFILING_PATH=/tmp/profile",
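This hunk starts Postgres and points headscale at it by container name on the shared network, but it does not wait for the database to accept connections; headscale's own connection retries presumably cover that. For comparison, the usual dockertest readiness pattern (an assumption, not part of this commit) polls through the mapped host port:

// Hypothetical readiness probe, not in this commit. Assumes pg is the
// *dockertest.Resource started above, "database/sql" is imported, and the
// lib/pq driver is registered via: _ "github.com/lib/pq".
dsn := fmt.Sprintf(
	"postgres://headscale:headscale@%s/headscale?sslmode=disable",
	pg.GetHostPort("5432/tcp"),
)
if err := pool.Retry(func() error {
	db, err := sql.Open("postgres", dsn)
	if err != nil {
		return err
	}
	defer db.Close()
	return db.Ping()
}); err != nil {
	return nil, fmt.Errorf("waiting for postgres: %w", err)
}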
@@ -348,12 +386,20 @@ func (t *HeadscaleInContainer) Shutdown() error {
 		)
 	}
 
-	err = t.SaveDatabase("/tmp/control")
-	if err != nil {
-		log.Printf(
-			"Failed to save database from control: %s",
-			fmt.Errorf("failed to save database from control: %w", err),
-		)
+	// We dont have a database to save if we use postgres
+	if !t.postgres {
+		err = t.SaveDatabase("/tmp/control")
+		if err != nil {
+			log.Printf(
+				"Failed to save database from control: %s",
+				fmt.Errorf("failed to save database from control: %w", err),
+			)
+		}
+	}
+
+	// Cleanup postgres container if enabled.
+	if t.postgres {
+		t.pool.Purge(t.pgContainer)
 	}
 
 	return t.pool.Purge(t.container)
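The Postgres purge result is discarded here; an alternative sketch (assumed, not the commit's code) that surfaces it, for symmetry with how the headscale container's purge error is returned on the final line:

// Assumed variant of the cleanup above: log a failed purge of the
// Postgres container instead of silently dropping the error.
if t.postgres {
	if err := t.pool.Purge(t.pgContainer); err != nil {
		log.Printf("Failed to purge postgres container: %s", err)
	}
}

return t.pool.Purge(t.container)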