I want to upload my package to Gitlab Package Registry, using the plugin availaible at -> https://github.com/gilcloud/sbt-gitlab
I fully followed the instructions given at the readme file. But when I am trying to use sbt publish I am getting error.
[error] (xProject / publishConfiguration) Repository for publishing is not specified. [error] (tProject / publishConfiguration) Repository for publishing is not specified.
Any clue how to do it ? The maven way was quite simple, but facing lots of issue when using sbt.
Here is my SBT file :
import sbt.Keys.{packageBin, packagedArtifacts, _} import sbtassembly.AssemblyKeys.{assembly, assemblyMergeStrategy} // Platform and framework versions. val demoOrg = "com.demo" val demoVersion = "1.0.0-SNAPSHOT" val demoName = "demo" val _javaVersion = "1.8" // Valid values: "1.8"|"11" val _scalaVersion = "2.12.12" // Scala version must be compatible with Spark Scala version val scalaMajorVersion = "2.12" // Scala version must be compatible with Spark Scala version val sparkVersion = "3.0.1" val _javaOptions = Seq("-Xmx2G") val _javacOptions = Seq("-source", _javaVersion, "-target", _javaVersion, "-Xlint", "-Xdiags:verbose") ThisBuild / useCoursier := false import com.gilcloud.sbt.gitlab.{GitlabCredentials,GitlabPlugin} GitlabPlugin.autoImport.gitlabGroupId := Some(101010) GitlabPlugin.autoImport.gitlabProjectId := Some(0101010) GitlabPlugin.autoImport.gitlabDomain := "gitlab.com" // Alternatively for credential managment // ideal for pulling artifacts locally and keeping tokens out of your source control // see below for sample .credentials file credentials += Credentials(Path.userHome / ".sbt" / ".credentials.gitlab") val _scalacOptions = Seq( "-deprecation", // Emit warning and location for usages of deprecated APIs. "-encoding", "utf-8", // Specify character encoding used by source files. "-explaintypes", // Explain type errors in more detail. "-feature", // Emit warning and location for usages of features that should be imported explicitly. "-g:vars", "-language:_", "-language:existentials", // Existential types (besides wildcard types) can be written and inferred "-language:experimental.macros", // Allow macro definition (besides implementation and application) "-language:higherKinds", // Allow higher-kinded types "-language:implicitConversions", // Allow definition of implicit functions called views "-unchecked", // Enable additional warnings where generated code depends on assumptions. "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access. "-Xexperimental", // Former graveyard for language-forking extensions. "-Xfatal-warnings", // Fail the compilation if there are any warnings. "-Xfuture", // Turn on future language features. "-Xlint:_", "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver. "-Xlint:by-name-right-associative", // By-name parameter of right associative operator. "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error. "-Xlint:delayedinit-select", // Selecting member of DelayedInit. "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element. "-Xlint:inaccessible", // Warn about inaccessible types in method signatures. "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`. "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id. "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. "-Xlint:nullary-unit", // Warn when nullary methods return Unit. "-Xlint:option-implicit", // Option.apply used implicit view. "-Xlint:package-object-classes", // Class or object defined in package object. "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds. "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field. "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component. "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope. "-Xlint:unsound-match", // Pattern match may not be typesafe. "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver. "-Ypartial-unification", // Enable partial unification in type constructor inference "-Ywarn-dead-code", // Warn when dead code is identified. "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined. "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`. "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. "-Ywarn-nullary-unit", // Warn when nullary methods return Unit. "-Ywarn-numeric-widen", // Warn when numerics are widened. "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused. "-Ywarn-unused:imports", // Warn if an import selector is not referenced. "-Ywarn-unused:locals", // Warn if a local definition is unused. "-Ywarn-unused:params", // Warn if a value parameter is unused. "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused. "-Ywarn-unused:privates", // Warn if a private member is unused. "-Ywarn-value-discard" // Warn when non-Unit expression results are unused. ) // Common settings lazy val commonSettings = Seq(organization := demoOrg, version := demoVersion, scalaVersion := _scalaVersion, scalacOptions := _scalacOptions, javacOptions := _javacOptions, javaOptions := _javaOptions, resolvers := Seq(Resolver.mavenCentral, Resolver.defaultLocal, Resolver.mavenLocal), fork := true) // Needed as SBT's classloader doesn't work well with Spark // Assembly settings val assemblySettings = Seq(assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = true, includeDependency = true), test in assembly := {}, assemblyMergeStrategy in assembly := { case PathList("META-INF", "hk2-locator", xs@_*) => MergeStrategy.discard case PathList("META-INF", "versions", xs@_*) => MergeStrategy.last case PathList("META-INF", "DISCLAIMER", xs@_*) => MergeStrategy.last case PathList("META-INF", "io.netty.versions.properties", xs@_*) => MergeStrategy.last case PathList("org", "aopalliance", xs@_*) => MergeStrategy.discard case PathList("org", "objectweb", "asm", xs@_*) => MergeStrategy.first case PathList("org", "xmlpull", "v1", xs@_*) => MergeStrategy.last case PathList("org", "objenesis", xs@_*) => MergeStrategy.last case PathList("javax", "activation", xs@_*) => MergeStrategy.last case PathList("javax", "el", xs@_*) => MergeStrategy.first case PathList("javax", "inject", xs@_*) => MergeStrategy.first case PathList("javax", "validation", xs@_*) => MergeStrategy.first case PathList("javax", "servlet", "jsp", xs@_*) => MergeStrategy.first case PathList("mozilla", "public-suffix-list.txt", xs@_*) => MergeStrategy.last case PathList("org", "apache", "commons", "logging", xs@_*) => MergeStrategy.last case "log4j.properties" | "module-info.class" => MergeStrategy.discard case x => val oldStrategy = (assemblyMergeStrategy in assembly).value oldStrategy(x) }, assemblyExcludedJars in assembly := { val cp = (fullClasspath in assembly).value cp.filter(p => p.data.getName.startsWith("log4j-over-slf4j") || p.data.getName.startsWith("jcl-over-slf4j") || p.data.getName.startsWith("jul-to-slf4j") || p.data.getName.startsWith("slf4j-log4j")) }) // Root project: attempts to suppress empty root JAR generation failed so far. lazy val rootProject = (project in file(".")) .disablePlugins(sbtassembly.AssemblyPlugin) .settings(commonSettings, name := demoName, skip in packageBin := true, skip in publish := true, packagedArtifacts := Map.empty) .aggregate(commonProject, discoveryProject, catalogProject, clientProject) // Attempt to suppress packaging and publishing of an empty root project artifact. publishArtifact in rootProject := false packagedArtifacts in rootProject := Map.empty lazy val commonProject = (project in file("demo-common")) .disablePlugins(sbtassembly.AssemblyPlugin) .settings(commonSettings, name := "demo-demo-common", libraryDependencies += "org.apache.opennlp" % "opennlp-tools" % "1.9.2" % Compile, libraryDependencies += "com.squareup.okhttp3" % "okhttp" % "4.9.0" % Compile, libraryDependencies += "com.google.code.gson" % "gson" % "2.8.6" % Compile, libraryDependencies += "org.json" % "json" % "20200518" % Compile, libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.10" % Compile, libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.30" % Provided, libraryDependencies += "org.slf4j" % "slf4j-log4j12" % "1.7.30" % Provided, libraryDependencies += "org.scalatest" %% "scalatest" % "3.1.1" % Test, libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.3" % Test) lazy val discoveryProject = (project in file("demo-discovery")) .settings(commonSettings, name := "demo-demo-discovery", libraryDependencies += "com.h2database" % "h2" % "1.4.200" % Provided, libraryDependencies += "io.prestosql" % "presto-jdbc" % "338" % Provided, libraryDependencies += "org.testng" % "testng" % "7.1.0" % Test, libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.30" % Provided, libraryDependencies += "org.slf4j" % "slf4j-log4j12" % "1.7.30" % Provided, libraryDependencies += "org.scalatest" %% "scalatest" % "3.1.1" % Test, libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.3" % Test, assemblySettings) .dependsOn(commonProject) lazy val catalogProject = (project in file("demo-catalog")) .settings(commonSettings, name := "demo-demo-catalog", libraryDependencies += "com.thoughtworks.paranamer" % "paranamer" % "2.8" % Compile, libraryDependencies += "net.sansa-stack" %% "sansa-rdf-spark" % "0.7.7-SNAPSHOT" % Compile, libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-streaming" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-graphx" % sparkVersion % Provided, dependencyOverrides += "org.slf4j" % "slf4j-api" % "1.7.30" % Provided, dependencyOverrides += "org.slf4j" % "slf4j-log4j12" % "1.7.30" % Provided, libraryDependencies += "org.scalatest" %% "scalatest" % "3.1.1" % Test, libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.3" % Test, assemblySettings, assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false)) .dependsOn(commonProject, discoveryProject) lazy val clientProject = (project in file("demo-client")) .settings(commonSettings, name := "demo-demo-client", libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-streaming" % sparkVersion % Provided, libraryDependencies += "org.apache.spark" %% "spark-graphx" % sparkVersion % Provided, dependencyOverrides += "org.slf4j" % "slf4j-api" % "1.7.30" % Provided, dependencyOverrides += "org.slf4j" % "slf4j-log4j12" % "1.7.30" % Provided, libraryDependencies += "org.scalatest" %% "scalatest" % "3.1.1" % Test, libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.3" % Test, assemblySettings) .dependsOn(commonProject, catalogProject)
https://stackoverflow.com/questions/65386349/how-to-use-package-registry-with-sbt-project December 21, 2020 at 09:05AM
没有评论:
发表评论