logdna 1.16.0 published on Monday, Apr 14, 2025 by logdna

# Resource: logdna.Archive

Manages LogDNA Archiving configuration for an account.

Example IBM COS Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    ibmConfig: {
        apikey: "key",
        bucket: "example",
        endpoint: "example.com",
        resourceinstanceid: "id",
    },
    integration: "ibm",
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    ibm_config={
        "apikey": "key",
        "bucket": "example",
        "endpoint": "example.com",
        "resourceinstanceid": "id",
    },
    integration="ibm")
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			IbmConfig: &logdna.ArchiveIbmConfigArgs{
				Apikey:             pulumi.String("key"),
				Bucket:             pulumi.String("example"),
				Endpoint:           pulumi.String("example.com"),
				Resourceinstanceid: pulumi.String("id"),
			},
			Integration: pulumi.String("ibm"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        IbmConfig = new Logdna.Inputs.ArchiveIbmConfigArgs
        {
            Apikey = "key",
            Bucket = "example",
            Endpoint = "example.com",
            Resourceinstanceid = "id",
        },
        Integration = "ibm",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveIbmConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .ibmConfig(ArchiveIbmConfigArgs.builder()
                .apikey("key")
                .bucket("example")
                .endpoint("example.com")
                .resourceinstanceid("id")
                .build())
            .integration("ibm")
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      ibmConfig:
        apikey: key
        bucket: example
        endpoint: example.com
        resourceinstanceid: id
      integration: ibm

Example AWS S3 Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    integration: "s3",
    s3Config: {
        bucket: "example",
    },
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    integration="s3",
    s3_config={
        "bucket": "example",
    })
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			Integration: pulumi.String("s3"),
			S3Config: &logdna.ArchiveS3ConfigArgs{
				Bucket: pulumi.String("example"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        Integration = "s3",
        S3Config = new Logdna.Inputs.ArchiveS3ConfigArgs
        {
            Bucket = "example",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveS3ConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .integration("s3")
            .s3Config(ArchiveS3ConfigArgs.builder()
                .bucket("example")
                .build())
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      integration: s3
      s3Config:
        bucket: example

Example Azure Blob Storage Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    azblobConfig: {
        accountkey: "example key",
        accountname: "example name",
    },
    integration: "azblob",
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    azblob_config={
        "accountkey": "example key",
        "accountname": "example name",
    },
    integration="azblob")
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			AzblobConfig: &logdna.ArchiveAzblobConfigArgs{
				Accountkey:  pulumi.String("example key"),
				Accountname: pulumi.String("example name"),
			},
			Integration: pulumi.String("azblob"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        AzblobConfig = new Logdna.Inputs.ArchiveAzblobConfigArgs
        {
            Accountkey = "example key",
            Accountname = "example name",
        },
        Integration = "azblob",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveAzblobConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .azblobConfig(ArchiveAzblobConfigArgs.builder()
                .accountkey("example key")
                .accountname("example name")
                .build())
            .integration("azblob")
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      azblobConfig:
        accountkey: example key
        accountname: example name
      integration: azblob

Example Google Cloud Storage Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    gcsConfig: {
        bucket: "example",
        projectid: "id",
    },
    integration: "gcs",
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    gcs_config={
        "bucket": "example",
        "projectid": "id",
    },
    integration="gcs")
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			GcsConfig: &logdna.ArchiveGcsConfigArgs{
				Bucket:    pulumi.String("example"),
				Projectid: pulumi.String("id"),
			},
			Integration: pulumi.String("gcs"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        GcsConfig = new Logdna.Inputs.ArchiveGcsConfigArgs
        {
            Bucket = "example",
            Projectid = "id",
        },
        Integration = "gcs",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveGcsConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .gcsConfig(ArchiveGcsConfigArgs.builder()
                .bucket("example")
                .projectid("id")
                .build())
            .integration("gcs")
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      gcsConfig:
        bucket: example
        projectid: id
      integration: gcs

Example DigitalOcean Spaces Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    dosConfig: {
        accesskey: "key",
        endpoint: "example.com",
        secretkey: "key",
        space: "example",
    },
    integration: "dos",
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    dos_config={
        "accesskey": "key",
        "endpoint": "example.com",
        "secretkey": "key",
        "space": "example",
    },
    integration="dos")
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			DosConfig: &logdna.ArchiveDosConfigArgs{
				Accesskey: pulumi.String("key"),
				Endpoint:  pulumi.String("example.com"),
				Secretkey: pulumi.String("key"),
				Space:     pulumi.String("example"),
			},
			Integration: pulumi.String("dos"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        DosConfig = new Logdna.Inputs.ArchiveDosConfigArgs
        {
            Accesskey = "key",
            Endpoint = "example.com",
            Secretkey = "key",
            Space = "example",
        },
        Integration = "dos",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveDosConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .dosConfig(ArchiveDosConfigArgs.builder()
                .accesskey("key")
                .endpoint("example.com")
                .secretkey("key")
                .space("example")
                .build())
            .integration("dos")
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      dosConfig:
        accesskey: key
        endpoint: example.com
        secretkey: key
        space: example
      integration: dos

Example OpenStack Swift Archive

import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";

const config = new logdna.Archive("config", {
    integration: "swift",
    swiftConfig: {
        authurl: "example.com",
        expires: 5,
        password: "password",
        tenantname: "example",
        username: "example user",
    },
});
import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    integration="swift",
    swift_config={
        "authurl": "example.com",
        "expires": 5,
        "password": "password",
        "tenantname": "example",
        "username": "example user",
    })
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
			Integration: pulumi.String("swift"),
			SwiftConfig: &logdna.ArchiveSwiftConfigArgs{
				Authurl:    pulumi.String("example.com"),
				Expires:    pulumi.Float64(5),
				Password:   pulumi.String("password"),
				Tenantname: pulumi.String("example"),
				Username:   pulumi.String("example user"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;

return await Deployment.RunAsync(() => 
{
    var config = new Logdna.Archive("config", new()
    {
        Integration = "swift",
        SwiftConfig = new Logdna.Inputs.ArchiveSwiftConfigArgs
        {
            Authurl = "example.com",
            Expires = 5,
            Password = "password",
            Tenantname = "example",
            Username = "example user",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveSwiftConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var config = new Archive("config", ArchiveArgs.builder()
            .integration("swift")
            .swiftConfig(ArchiveSwiftConfigArgs.builder()
                .authurl("example.com")
                .expires(5)
                .password("password")
                .tenantname("example")
                .username("example user")
                .build())
            .build());

    }
}
resources:
  config:
    type: logdna:Archive
    properties:
      integration: swift
      swiftConfig:
        authurl: example.com
        expires: 5
        password: password
        tenantname: example
        username: example user

Create Archive Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Archive(name: string, args: ArchiveArgs, opts?: CustomResourceOptions);
@overload
def Archive(resource_name: str,
            args: ArchiveArgs,
            opts: Optional[ResourceOptions] = None)

@overload
def Archive(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            integration: Optional[str] = None,
            archive_id: Optional[str] = None,
            azblob_config: Optional[ArchiveAzblobConfigArgs] = None,
            dos_config: Optional[ArchiveDosConfigArgs] = None,
            gcs_config: Optional[ArchiveGcsConfigArgs] = None,
            ibm_config: Optional[ArchiveIbmConfigArgs] = None,
            s3_config: Optional[ArchiveS3ConfigArgs] = None,
            swift_config: Optional[ArchiveSwiftConfigArgs] = None)
func NewArchive(ctx *Context, name string, args ArchiveArgs, opts ...ResourceOption) (*Archive, error)
public Archive(string name, ArchiveArgs args, CustomResourceOptions? opts = null)
public Archive(String name, ArchiveArgs args)
public Archive(String name, ArchiveArgs args, CustomResourceOptions options)
type: logdna:Archive
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. ArchiveArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. ArchiveArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. ArchiveArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. ArchiveArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. ArchiveArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var archiveResource = new Logdna.Archive("archiveResource", new()
{
    Integration = "string",
    ArchiveId = "string",
    AzblobConfig = new Logdna.Inputs.ArchiveAzblobConfigArgs
    {
        Accountkey = "string",
        Accountname = "string",
    },
    DosConfig = new Logdna.Inputs.ArchiveDosConfigArgs
    {
        Accesskey = "string",
        Endpoint = "string",
        Secretkey = "string",
        Space = "string",
    },
    GcsConfig = new Logdna.Inputs.ArchiveGcsConfigArgs
    {
        Bucket = "string",
        Projectid = "string",
    },
    IbmConfig = new Logdna.Inputs.ArchiveIbmConfigArgs
    {
        Apikey = "string",
        Bucket = "string",
        Endpoint = "string",
        Resourceinstanceid = "string",
    },
    S3Config = new Logdna.Inputs.ArchiveS3ConfigArgs
    {
        Bucket = "string",
    },
    SwiftConfig = new Logdna.Inputs.ArchiveSwiftConfigArgs
    {
        Authurl = "string",
        Password = "string",
        Tenantname = "string",
        Username = "string",
        Expires = 0,
    },
});
example, err := logdna.NewArchive(ctx, "archiveResource", &logdna.ArchiveArgs{
	Integration: pulumi.String("string"),
	ArchiveId:   pulumi.String("string"),
	AzblobConfig: &logdna.ArchiveAzblobConfigArgs{
		Accountkey:  pulumi.String("string"),
		Accountname: pulumi.String("string"),
	},
	DosConfig: &logdna.ArchiveDosConfigArgs{
		Accesskey: pulumi.String("string"),
		Endpoint:  pulumi.String("string"),
		Secretkey: pulumi.String("string"),
		Space:     pulumi.String("string"),
	},
	GcsConfig: &logdna.ArchiveGcsConfigArgs{
		Bucket:    pulumi.String("string"),
		Projectid: pulumi.String("string"),
	},
	IbmConfig: &logdna.ArchiveIbmConfigArgs{
		Apikey:             pulumi.String("string"),
		Bucket:             pulumi.String("string"),
		Endpoint:           pulumi.String("string"),
		Resourceinstanceid: pulumi.String("string"),
	},
	S3Config: &logdna.ArchiveS3ConfigArgs{
		Bucket: pulumi.String("string"),
	},
	SwiftConfig: &logdna.ArchiveSwiftConfigArgs{
		Authurl:    pulumi.String("string"),
		Password:   pulumi.String("string"),
		Tenantname: pulumi.String("string"),
		Username:   pulumi.String("string"),
		Expires:    pulumi.Float64(0),
	},
})
var archiveResource = new Archive("archiveResource", ArchiveArgs.builder()
    .integration("string")
    .archiveId("string")
    .azblobConfig(ArchiveAzblobConfigArgs.builder()
        .accountkey("string")
        .accountname("string")
        .build())
    .dosConfig(ArchiveDosConfigArgs.builder()
        .accesskey("string")
        .endpoint("string")
        .secretkey("string")
        .space("string")
        .build())
    .gcsConfig(ArchiveGcsConfigArgs.builder()
        .bucket("string")
        .projectid("string")
        .build())
    .ibmConfig(ArchiveIbmConfigArgs.builder()
        .apikey("string")
        .bucket("string")
        .endpoint("string")
        .resourceinstanceid("string")
        .build())
    .s3Config(ArchiveS3ConfigArgs.builder()
        .bucket("string")
        .build())
    .swiftConfig(ArchiveSwiftConfigArgs.builder()
        .authurl("string")
        .password("string")
        .tenantname("string")
        .username("string")
        .expires(0)
        .build())
    .build());
archive_resource = logdna.Archive("archiveResource",
    integration="string",
    archive_id="string",
    azblob_config={
        "accountkey": "string",
        "accountname": "string",
    },
    dos_config={
        "accesskey": "string",
        "endpoint": "string",
        "secretkey": "string",
        "space": "string",
    },
    gcs_config={
        "bucket": "string",
        "projectid": "string",
    },
    ibm_config={
        "apikey": "string",
        "bucket": "string",
        "endpoint": "string",
        "resourceinstanceid": "string",
    },
    s3_config={
        "bucket": "string",
    },
    swift_config={
        "authurl": "string",
        "password": "string",
        "tenantname": "string",
        "username": "string",
        "expires": 0,
    })
const archiveResource = new logdna.Archive("archiveResource", {
    integration: "string",
    archiveId: "string",
    azblobConfig: {
        accountkey: "string",
        accountname: "string",
    },
    dosConfig: {
        accesskey: "string",
        endpoint: "string",
        secretkey: "string",
        space: "string",
    },
    gcsConfig: {
        bucket: "string",
        projectid: "string",
    },
    ibmConfig: {
        apikey: "string",
        bucket: "string",
        endpoint: "string",
        resourceinstanceid: "string",
    },
    s3Config: {
        bucket: "string",
    },
    swiftConfig: {
        authurl: "string",
        password: "string",
        tenantname: "string",
        username: "string",
        expires: 0,
    },
});
type: logdna:Archive
properties:
    archiveId: string
    azblobConfig:
        accountkey: string
        accountname: string
    dosConfig:
        accesskey: string
        endpoint: string
        secretkey: string
        space: string
    gcsConfig:
        bucket: string
        projectid: string
    ibmConfig:
        apikey: string
        bucket: string
        endpoint: string
        resourceinstanceid: string
    integration: string
    s3Config:
        bucket: string
    swiftConfig:
        authurl: string
        expires: 0
        password: string
        tenantname: string
        username: string

Archive Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
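For example, a nested input such as s3_config can be written either way. The following snippets are a minimal sketch and should be equivalent; the ArchiveS3ConfigArgs class name is taken from the constructor signature above, and the resource names are illustrative.

import pulumi_logdna as logdna

# Passing the nested input as an argument class.
via_args_class = logdna.Archive("viaArgsClass",
    integration="s3",
    s3_config=logdna.ArchiveS3ConfigArgs(bucket="example"))

# Passing the same nested input as a dictionary literal.
via_dict = logdna.Archive("viaDict",
    integration="s3",
    s3_config={"bucket": "example"})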

The Archive resource accepts the following input properties:

Integration This property is required. string
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
ArchiveId string
AzblobConfig ArchiveAzblobConfig
DosConfig ArchiveDosConfig
GcsConfig ArchiveGcsConfig
IbmConfig ArchiveIbmConfig
S3Config ArchiveS3Config
SwiftConfig ArchiveSwiftConfig
Integration This property is required. string
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
ArchiveId string
AzblobConfig ArchiveAzblobConfigArgs
DosConfig ArchiveDosConfigArgs
GcsConfig ArchiveGcsConfigArgs
IbmConfig ArchiveIbmConfigArgs
S3Config ArchiveS3ConfigArgs
SwiftConfig ArchiveSwiftConfigArgs
integration This property is required. String
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
archiveId String
azblobConfig ArchiveAzblobConfig
dosConfig ArchiveDosConfig
gcsConfig ArchiveGcsConfig
ibmConfig ArchiveIbmConfig
s3Config ArchiveS3Config
swiftConfig ArchiveSwiftConfig
integration This property is required. string
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
archiveId string
azblobConfig ArchiveAzblobConfig
dosConfig ArchiveDosConfig
gcsConfig ArchiveGcsConfig
ibmConfig ArchiveIbmConfig
s3Config ArchiveS3Config
swiftConfig ArchiveSwiftConfig
integration This property is required. String
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
archiveId String
azblobConfig Property Map
dosConfig Property Map
gcsConfig Property Map
ibmConfig Property Map
s3Config Property Map
swiftConfig Property Map

Outputs

All input properties are implicitly available as output properties. Additionally, the Archive resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
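As a short Python sketch (reusing the S3 example above; the export name is illustrative), the id output can be consumed like any other output:

import pulumi
import pulumi_logdna as logdna

config = logdna.Archive("config",
    integration="s3",
    s3_config={"bucket": "example"})

# The provider-assigned ID becomes available once the resource is created.
pulumi.export("archiveId", config.id)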

Look up Existing Archive Resource

Get an existing Archive resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: ArchiveState, opts?: CustomResourceOptions): Archive
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        archive_id: Optional[str] = None,
        azblob_config: Optional[ArchiveAzblobConfigArgs] = None,
        dos_config: Optional[ArchiveDosConfigArgs] = None,
        gcs_config: Optional[ArchiveGcsConfigArgs] = None,
        ibm_config: Optional[ArchiveIbmConfigArgs] = None,
        integration: Optional[str] = None,
        s3_config: Optional[ArchiveS3ConfigArgs] = None,
        swift_config: Optional[ArchiveSwiftConfigArgs] = None) -> Archive
func GetArchive(ctx *Context, name string, id IDInput, state *ArchiveState, opts ...ResourceOption) (*Archive, error)
public static Archive Get(string name, Input<string> id, ArchiveState? state, CustomResourceOptions? opts = null)
public static Archive get(String name, Output<String> id, ArchiveState state, CustomResourceOptions options)
resources:
  _:
    type: logdna:Archive
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
archiveId String
azblobConfig Property Map
dosConfig Property Map
gcsConfig Property Map
ibmConfig Property Map
integration String
string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift
s3Config Property Map
swiftConfig Property Map
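
As a minimal Python sketch (the resource name "existing-config" and the ID "archive" are placeholders; see the import section below for the expected ID), an existing configuration can be referenced without recreating it:

import pulumi
import pulumi_logdna as logdna

# Look up the existing archiving configuration by its provider ID.
existing = logdna.Archive.get("existing-config", "archive")

# State fields such as integration are then available as outputs.
pulumi.export("integration", existing.integration)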

Supporting Types

ArchiveAzblobConfig
, ArchiveAzblobConfigArgs

Accountkey This property is required. string
string (Required) Azure Blob Storage account access key
Accountname This property is required. string
string (Required) Azure Blob Storage account name
Accountkey This property is required. string
string (Required) Azure Blob Storage account access key
Accountname This property is required. string
string (Required) Azure Blob Storage account name
accountkey This property is required. String
string (Required) Azure Blob Storage account access key
accountname This property is required. String
string (Required) Azure Blob Storage account name
accountkey This property is required. string
string (Required) Azure Blob Storage account access key
accountname This property is required. string
string (Required) Azure Blob Storage account name
accountkey This property is required. str
string (Required) Azure Blob Storage account access key
accountname This property is required. str
string (Required) Azure Blob Storage account name
accountkey This property is required. String
string (Required) Azure Blob Storage account access key
accountname This property is required. String
string (Required) Azure Blob Storage account name

ArchiveDosConfig
, ArchiveDosConfigArgs

Accesskey This property is required. string
string (Required) DigitalOcean Spaces API access key
Endpoint This property is required. string
string (Required) DigitalOcean Spaces (region) endpoint
Secretkey This property is required. string
string (Required) DigitalOcean Spaces API secret key
Space This property is required. string
string (Required) DigitalOcean Spaces storage "bucket" name
Accesskey This property is required. string
string (Required) DigitalOcean Spaces API access key
Endpoint This property is required. string
string (Required) DigitalOcean Spaces (region) endpoint
Secretkey This property is required. string
string (Required) DigitalOcean Spaces API secret key
Space This property is required. string
string (Required) DigitalOcean Spaces storage "bucket" name
accesskey This property is required. String
string (Required) DigitalOcean Spaces API access key
endpoint This property is required. String
string (Required) DigitalOcean Spaces (region) endpoint
secretkey This property is required. String
string (Required) DigitalOcean Spaces API secret key
space This property is required. String
string (Required) DigitalOcean Spaces storage "bucket" name
accesskey This property is required. string
string (Required) DigitalOcean Spaces API access key
endpoint This property is required. string
string (Required) DigitalOcean Spaces (region) endpoint
secretkey This property is required. string
string (Required) DigitalOcean Spaces API secret key
space This property is required. string
string (Required) DigitalOcean Spaces storage "bucket" name
accesskey This property is required. str
string (Required) DigitalOcean Spaces API access key
endpoint This property is required. str
string (Required) DigitalOcean Spaces (region) endpoint
secretkey This property is required. str
string (Required) DigitalOcean Spaces API secret key
space This property is required. str
string (Required) DigitalOcean Spaces storage "bucket" name
accesskey This property is required. String
string (Required) DigitalOcean Spaces API access key
endpoint This property is required. String
string (Required) DigitalOcean Spaces (region) endpoint
secretkey This property is required. String
string (Required) DigitalOcean Spaces API secret key
space This property is required. String
string (Required) DigitalOcean Spaces storage "bucket" name

ArchiveGcsConfig
, ArchiveGcsConfigArgs

Bucket This property is required. string
string (Required) Google Cloud Storage bucket name
Projectid This property is required. string
string (Required) Google Cloud project identifier
Bucket This property is required. string
string (Required) Google Cloud Storage bucket name
Projectid This property is required. string
string (Required) Google Cloud project identifier
bucket This property is required. String
string (Required) Google Cloud Storage bucket name
projectid This property is required. String
string (Required) Google Cloud project identifier
bucket This property is required. string
string (Required) Google Cloud Storage bucket name
projectid This property is required. string
string (Required) Google Cloud project identifier
bucket This property is required. str
string (Required) Google Cloud Storage bucket name
projectid This property is required. str
string (Required) Google Cloud project identifier
bucket This property is required. String
string (Required) Google Cloud Storage bucket name
projectid This property is required. String
string (Required) Google Cloud project identifier

ArchiveIbmConfig
, ArchiveIbmConfigArgs

Apikey This property is required. string
string (Required) IBM COS API key
Bucket This property is required. string
string (Required) IBM COS storage bucket name
Endpoint This property is required. string
string (Required) IBM COS public (region) endpoint
Resourceinstanceid This property is required. string
string (Required) IBM COS instance identifier
Apikey This property is required. string
string (Required) IBM COS API key
Bucket This property is required. string
string (Required) IBM COS storage bucket name
Endpoint This property is required. string
string (Required) IBM COS public (region) endpoint
Resourceinstanceid This property is required. string
string (Required) IBM COS instance identifier
apikey This property is required. String
string (Required) IBM COS API key
bucket This property is required. String
string (Required) IBM COS storage bucket name
endpoint This property is required. String
string (Required) IBM COS public (region) endpoint
resourceinstanceid This property is required. String
string (Required) IBM COS instance identifier
apikey This property is required. string
string (Required) IBM COS API key
bucket This property is required. string
string (Required) IBM COS storage bucket name
endpoint This property is required. string
string (Required) IBM COS public (region) endpoint
resourceinstanceid This property is required. string
string (Required) IBM COS instance identifier
apikey This property is required. str
string (Required) IBM COS API key
bucket This property is required. str
string (Required) IBM COS storage bucket name
endpoint This property is required. str
string (Required) IBM COS public (region) endpoint
resourceinstanceid This property is required. str
string (Required) IBM COS instance identifier
apikey This property is required. String
string (Required) IBM COS API key
bucket This property is required. String
string (Required) IBM COS storage bucket name
endpoint This property is required. String
string (Required) IBM COS public (region) endpoint
resourceinstanceid This property is required. String
string (Required) IBM COS instance identifier

ArchiveS3Config
, ArchiveS3ConfigArgs

Bucket This property is required. string
Bucket This property is required. string
bucket This property is required. String
bucket This property is required. string
bucket This property is required. str
bucket This property is required. String

ArchiveSwiftConfig
, ArchiveSwiftConfigArgs

Authurl This property is required. string
string (Required) OpenStack Swift authentication URL
Password This property is required. string
string (Required) OpenStack Swift user password
Tenantname This property is required. string

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

Username This property is required. string
string (Required) OpenStack Swift user name
Expires double
integer (Optional) OpenStack Swift storage object days till expiry
Authurl This property is required. string
string (Required) OpenStack Swift authentication URL
Password This property is required. string
string (Required) OpenStack Swift user password
Tenantname This property is required. string

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

Username This property is required. string
string (Required) OpenStack Swift user name
Expires float64
integer (Optional) OpenStack Swift storage object days till expiry
authurl This property is required. String
string (Required) OpenStack Swift authentication URL
password This property is required. String
string (Required) OpenStack Swift user password
tenantname This property is required. String

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

username This property is required. String
string (Required) OpenStack Swift user name
expires Double
integer (Optional) OpenStack Swift storage object days till expiry
authurl This property is required. string
string (Required) OpenStack Swift authentication URL
password This property is required. string
string (Required) OpenStack Swift user password
tenantname This property is required. string

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

username This property is required. string
string (Required) OpenStack Swift user name
expires number
integer (Optional) OpenStack Swift storage object days till expiry
authurl This property is required. str
string (Required) OpenStack Swift authentication URL
password This property is required. str
string (Required) OpenStack Swift user password
tenantname This property is required. str

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

username This property is required. str
string (Required) OpenStack Swift user name
expires float
integer (Optional) OpenStack Swift storage object days till expiry
authurl This property is required. String
string (Required) OpenStack Swift authentication URL
password This property is required. String
string (Required) OpenStack Swift user password
tenantname This property is required. String

string (Required) OpenStack Swift tenant/project/account name

Note that the provided settings must be valid. The connection to the archiving integration will be validated before the configuration can be saved.

username This property is required. String
string (Required) OpenStack Swift user name
expires Number
integer (Optional) OpenStack Swift storage object days till expiry

Import

Importing an existing configuration is supported:

$ pulumi import logdna:index/archive:Archive config archive

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
logdna logdna/terraform-provider-logdna
License
Notes
This Pulumi package is based on the logdna Terraform Provider.