Trying to create & use a conan package: lib dir not set - cmake

I created a conan package for a 3rd party tool. I'm trying to test it, but the lib dir doesn't seem to be set, so the linker can't find the package's library. Not sure how to debug it.
Here's the conanfile.py for the package ("hjson"):
from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import get

class HjsonConan(ConanFile):
    name = "hjson"
    version = "2.2"
    url = "https://github.com/hjson/hjson-cpp"
    license = "MIT"
    # Optional metadata
    description = "hjson C++ json library"
    # Binary configuration
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False], "fPIC": [True, False], "version": "ANY"}
    default_options = {"shared": False, "fPIC": True, "version": "2.2"}
    # Sources are located in the same place as this recipe, copy them to the recipe
    exports_sources = ("hjson-src/*")

    def source(self):
        # unpacks into ./hjson-src
        get(self,
            f"https://github.com/hjson/hjson-cpp/archive/refs/tags/{self.options.version}.tar.gz",
            destination='hjson-src',
            strip_root=True)

    def config_options(self):
        if self.settings.os == "Windows":
            del self.options.fPIC

    def layout(self):
        # Sets source=., build=build/Release, generators=build/generators
        cmake_layout(self)

    def generate(self):
        tc = CMakeToolchain(self)
        # Here is where to set CMake variables ("-DXYZ")
        tc.variables["HJSON_ENABLE_INSTALL"] = 1
        tc.generate()

    def build(self):
        cmake = CMake(self)
        cmake.configure(build_script_folder='hjson-src')
        cmake.build()

    def package(self):
        cmake = CMake(self)
        cmake.install()

    def package_info(self):
        self.cpp_info.libs = ["hjson"]
        # these are the defaults, here for clarity
        self.cpp_info.includedirs = ["include"]
        self.cpp_info.libdirs = ["lib"]
Running conan create . works; it populates a subdir of my ~/.conan with the lib and headers and other metadata.
So I try to use it, with this conanfile:
import os

from conan import ConanFile
from conan.tools.cmake import CMake, cmake_layout
from conan.tools.build import cross_building

class HJsonTestConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    # VirtualBuildEnv and VirtualRunEnv can be avoided if "tools.env.virtualenv:auto_use" is defined
    # (it will be defined in Conan 2.0)
    generators = "CMakeDeps", "CMakeToolchain", "VirtualBuildEnv", "VirtualRunEnv"
    apply_env = False
    test_type = "explicit"

    def requirements(self):
        self.requires("hjson/2.2")

    def build(self):
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def layout(self):
        cmake_layout(self)

    def test(self):
        if not cross_building(self):
            cmd = os.path.join(self.cpp.build.bindirs[0], "example")
            self.run(cmd, env="conanrun")
with this CMakeLists.txt:
cmake_minimum_required(VERSION 3.15)
project(PackageTest CXX)
find_package(hjson CONFIG REQUIRED)
add_executable(example src/example.cpp)
set_property(TARGET example PROPERTY CXX_STANDARD 20)
include_directories(${hjson_INCLUDE_DIRS})
link_directories(${hjson_LIB_DIRS})
target_link_libraries(example hjson)
but the link fails because it's not passing -L<conanpath> on the link line. Poking around, it looks like hjson_LIB_DIRS is not set in the generated build/generators/hjson-config.cmake:
set(hjson_VERSION_STRING "2.2")
set(hjson_INCLUDE_DIRS ${hjson_INCLUDE_DIRS_RELEASE} )
set(hjson_INCLUDE_DIR ${hjson_INCLUDE_DIRS_RELEASE} )
set(hjson_LIBRARIES ${hjson_LIBRARIES_RELEASE} )
set(hjson_DEFINITIONS ${hjson_DEFINITIONS_RELEASE} )
So where do I go to fix that? I did set the cpp_info.libdirs in the package, so not sure why it's not getting through to the generated config.
The created conan package looks like this, btw:
~/.conan/data/hjson/2.2/_/_/package/37c9d02f...91ef
├── conaninfo.txt
├── conanmanifest.txt
├── include
│   └── hjson
│       └── hjson.h
└── lib
    ├── hjson
    │   ├── hjson-config-version.cmake
    │   ├── hjson-config.cmake
    │   ├── hjson-release.cmake
    │   └── hjson.cmake
    └── libhjson.a
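For reference, CMakeDeps describes a dependency mainly through an imported target rather than through *_LIB_DIRS-style path variables, so a consumer CMakeLists.txt is usually written against that target. A minimal sketch of that consumption style (the hjson::hjson name is the CMakeDeps default of package::package and is an assumption here; it may differ if the recipe overrides it):

cmake_minimum_required(VERSION 3.15)
project(PackageTest CXX)

find_package(hjson CONFIG REQUIRED)

add_executable(example src/example.cpp)
set_property(TARGET example PROPERTY CXX_STANDARD 20)
# Linking the imported target propagates the include dirs and the library
# location, so no include_directories()/link_directories() calls are needed.
target_link_libraries(example hjson::hjson)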

Related

dependency and option in meson project

I have a project with the following folder tree:
.
├── meson.build (1)
├── meson_options.txt
├── README.md
└── src
    └── mysub
        ├── meson.build (2)
        └── mesonTest.c
the meson.options.txt contains
option('avx_opt', type : 'combo', choices : ['avx2', 'avx512'], value : 'avx512')
The mysub project is a dependency of the main project, so meson.build (1) is:
project(
  'my_proj',
  'c',
  version : '1.1.0',
  default_options : ['buildtype=plain','warning_level=3'],
  subproject_dir : 'src'
)

project_source_files = [
  ''
]

message('## source root : ' + meson.project_source_root() + ' ##')

project_dependencies = [
  dependency('mysub', fallback : ['mysub', 'mysub_dep']),
]

build_args = [
]

# ===================================================================
# ======
# Target
# ======
build_args += [
  '-DPROJECT_NAME=' + meson.project_name(),
  '-DPROJECT_VERSION=' + meson.project_version(),
]
the meson.build (2) of the mysub proj is:
project(
  'mysub',
  'c',
  version : '1.1.0',
  default_options : ['warning_level=3']
)

project_description = 'mysub binary'

project_source_files = [
  'mesonTest.c'
]

project_headers = [
]

avx_type = get_option('avx_opt')

if (avx_type == 'avx512')
  build_args_avx512 = [
    '-mavx512f',
    '-mavx512cd',
    '-mavx512vl',
    '-mavx512bw',
    '-mavx512dq',
    '-DNEWLDPC=1'
  ]
else
  build_args_avx512 = [
    '-DNEWLDPC=0'
  ]
endif

project_target = executable(
  meson.project_name(),
  project_source_files,
  install : true,
  c_args : build_args,
  link_args : '-Wl,--allow-shlib-undefined',
)

# =======
# Project
# =======
# Make this library usable as a Meson subproject.
project_dep = declare_dependency(
  include_directories: public_headers,
  link_with : project_target
)
set_variable(meson.project_name() + '_dep', project_dep)

# Make this library usable from the system's
# package manager.
install_headers(project_headers, subdir : meson.project_name())

pkg_mod = import('pkgconfig')
pkg_mod.generate(
  name : meson.project_name(),
  filebase : meson.project_name(),
  description : project_description,
  subdirs : meson.project_name(),
  # libraries : project_target,
)
I have tried to configure in the following way:
meson builddir -Davx_opt=avx512
or
meson builddir -Davx_opt:mysub=avx512
but in both cases I got:
The Meson build system
Version: 0.59.1
Source dir: /home/roccasal/wsEclipse/Intel/mesonTest/proj
Build dir: /home/roccasal/wsEclipse/Intel/mesonTest/proj/builddir
Build type: native build
Project name: my_proj
Project version: 1.1.0
C compiler for the host machine: cc (gcc 8.5.0 "cc (GCC) 8.5.0 20210514 (Red Hat 8.5.0-4)")
C linker for the host machine: cc ld.bfd 2.30-108
Host machine cpu family: x86_64
Host machine cpu: x86_64
Message: ## source root : /home/roccasal/wsEclipse/Intel/mesonTest/proj ##
Found pkg-config: /usr/bin/pkg-config (1.4.2)
Found CMake: /usr/bin/cmake (3.20.2)
Run-time dependency mysub found: NO (tried pkgconfig and cmake)
Looking for a fallback subproject for the dependency mysub
Executing subproject mysub
mysub| Project name: mysub
mysub| Project version: 1.1.0
mysub| C compiler for the host machine: cc (gcc 8.5.0 "cc (GCC) 8.5.0 20210514 (Red Hat 8.5.0-4)")
mysub| C linker for the host machine: cc ld.bfd 2.30-108
src/mysub/meson.build:32:0: ERROR: Tried to access unknown option "avx_opt".
What is wrong in the Meson build configuration?
The Meson version used is 0.59.1.
Thanks.
Check the Build options page in the reference manual:
To change values in subprojects prepend the name of the subproject and
a colon:
$ meson configure -Dsubproject:option=newvalue
Thus, try creating a new build dir with:
meson builddir -Dmysub:avx_opt=avx512
or configure the existing one with:
meson configure builddir -Dmysub:avx_opt=avx512
To make it work you also need this option defined in meson_options.txt in every subproject that uses it, but to simplify configuration you can, as @dcbaker suggested, use yielding, i.e. update the option definition for the main project:
option('avx_opt', ...., yield : true)
This will give you the possibility to configure it the same way for the main project and subprojects with just:
meson configure builddir -Davx_opt=avx512
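For completeness, combining that with the option definition shown in the question gives the following line (only the yield flag is new; the type, choices and default come from the question's own meson_options.txt):

option('avx_opt', type : 'combo', choices : ['avx2', 'avx512'], value : 'avx512', yield : true)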
Also (I guess it's just a typo in the question), the file with options should be named meson_options.txt (with an underscore).

How to test custom gradle plugin both with JUnit and manually in same sample project

My goal is to have this project setup:
example-gradle-plugin
├── build.gradle
└── src
    ├── main
    │   ├── java
    │   │   └── com ....
    └── test
        ├── java
        │   └── example
        │       └── integrationtest
        │           └── SimpleProjectRegressionTest.java  // GradleRunner.create().withProjectDir(testProjectPath.toFile()).withPluginClasspath()
        └── resources
            └── simple
                ├── build.gradle      # plugins { id "example-gradle-plugin" }
                └── settings.gradle   # // includeBuild ../../../../example-gradle-plugin
So the folder src/test/resources/simple is used both from a JUnit test and to run gradle commands from the command line using the composite build approach.
So this should work
cd src/test/resources/simple
gradle build
And this unit test should also work:
@Test
public void testBuildSample() throws Exception {
    final ClassLoader classLoader = ProjectSetupHelper.class.getClassLoader();
    final Path sampleSourceRootPath = Paths.get(classLoader.getResource("simple").toURI());
    final BuildResult result = GradleRunner.create()
            .withProjectDir(sampleSourceRootPath.toFile())
            .withArguments("build")
            .withPluginClasspath()
            .build();
}
However, there is a caveat when running JUnit: the custom plugin sources are referred to in two different ways at the same time:
GradleRunner.create().withProjectDir(testProjectPath.toFile()).withPluginClasspath() adds the project's custom plugin files to the classpath for running the build during the unit test
In src/test/resources/simple/settings.gradle, the includeBuild ... command also refers to the custom plugin.
Is there an easier or cleaner way to achieve the above: Having a sample project with composite build that can be used from the commandline to verify local changes to the plugin, and using that sample project also in a unit test of the plugin?
Currently in the unit test I copy the sample folder to a temporary folder without settings.gradle to avoid such complications.
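For illustration, a minimal sketch of that copy step, assuming plain java.nio.file (the class and method names here are made up, not taken from the original project):

// Hypothetical helper: copy the sample project to a temp dir, leaving out settings.gradle
// so that withPluginClasspath() is the only way the plugin is resolved in the JUnit run.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.stream.Stream;

public final class SampleProjectCopier {
    public static void copyWithoutSettings(Path sampleSourceRoot, Path targetDir) throws IOException {
        try (Stream<Path> paths = Files.walk(sampleSourceRoot)) {
            for (Path source : (Iterable<Path>) paths::iterator) {
                if (source.getFileName().toString().equals("settings.gradle")) {
                    continue; // drop the composite-build file in the copy
                }
                Path target = targetDir.resolve(sampleSourceRoot.relativize(source).toString());
                if (Files.isDirectory(source)) {
                    Files.createDirectories(target);
                } else {
                    Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
                }
            }
        }
    }
}

The test then points GradleRunner's withProjectDir() at the temporary copy instead of the resources folder.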
A little late to the party, but here is what I did.
I wrote a custom gradle plugin and was able to test it successfully with JUnit.
MyCustomPlugin.java
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.plugins.JavaPlugin;

public class MyCustomPlugin implements Plugin<Project>
{
    @Override
    public void apply(Project project)
    {
        DependencyHandler dependencyHandler = project.getDependencies();
        dependencyHandler.add(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME, "org.apache.commons:commons-lang3:3.12.0");
    }
}
MyCustomPluginTest.java
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.jupiter.api.Test;
import static org.gradle.internal.impldep.org.junit.Assert.assertTrue;

public class MyCustomPluginTest {
    @Test
    public void testCustomPlugin()
    {
        Project project = ProjectBuilder.builder().build();
        // Java plugin required for the IMPLEMENTATION configuration used in the custom plugin
        project.getPluginManager().apply("java");
        project.getPluginManager().apply("com.mycustomplugin");
        assertTrue(project.getPluginManager().hasPlugin("com.mycustomplugin"));
    }
}
Sources:
https://docs.gradle.org/current/userguide/custom_plugins.html#sec:writing_tests_for_your_plugin
https://www.baeldung.com/gradle-create-plugin

CMake linking error with multilevel src directory

I'm having issues linking my project together using cmake.
The project consists of a library, "mylib", and an executable, "mybin", which is linked with it. The structure is the following:
.
├── CMakeLists.txt
├── include
│   ├── mylib.h
│   ├── entities
│   │   ├── entities.h
│   │   ├── entityA.h
│   │   └── entityB.h
│   └── shapes
│       ├── shapes.h
│       ├── shapeA.h
│       └── entityB.h
└── src
    ├── main.cpp
    ├── entities
    │   ├── entityA.cpp
    │   └── entityB.cpp
    └── shapes
        ├── shapeA.cpp
        └── entityB.cpp
Here mylib uses all the .cpp and .h files (except main.cpp) to create a static library, and mybin uses main.cpp and links against mylib. mylib.h pulls in the other header files from the include directory.
Right now, my CMakeLists.txt looks like this:
cmake_minimum_required(VERSION 3.13)
project(myproject VERSION 1.1.0 LANGUAGES C CXX)
add_library(mylib
src/entities/entityA.cpp
src/entities/entityB.cpp
src/shapes/shapeA.cpp
src/shapes/shapeB.cpp
include/mylib.h
include/entities/entities.h
include/entities/entitiesA.h
include/entities/entitiesB.h
include/shapes/shapes.h
include/shapes/shapeA.h
include/shapes/shapeB.h
)
target_include_directories(mylib PUBLIC include)
target_link_libraries(mylib PUBLIC)
target_compile_features(mylib PRIVATE cxx_std_11)
add_executable(mybin src/main.cpp)
target_link_libraries(mybin PRIVATE mylib)
target_compile_features(mybin PRIVATE cxx_std_11)
When I try to build my code, with
mkdir build
cd build
cmake ..
make
The library builds fine, but I get linking errors while the executable is building:
/usr/bin/ld: shapeA.cpp:(.text+0x344): undefined reference to `MyLib::Entities::EntityA::normalize_inplace()'
Not sure how to fix this; I tried to follow numerous tutorials and looked at other issues and examples here, but couldn't fix it. The actual code is in my GitHub, in the "cmake" branch.
Thank you very much.
EDIT
The symbol from the linking error I copied above is used in the src/shapes/shapeA.cpp file:
#include "shapes/shapeA.h"
#include <algorithm>

using MyLib::Entities::EntityA;

Mylib::Shapes::ShapeA::ShapeA() {}
Mylib::Shapes::ShapeA::~ShapeA(){}

bool Mylib::Shapes::ShapeA::intersect(const EntityA &entity) const {
    const EntityA invdir = (EntityA(1.0, 1.0, 1.0) + entity).normalize_inplace();
    return true;
}
It is defined in src/entities/entityA.cpp :
#include "entities/EntityA.h"
#include <cmath>

using MyLib::Entities::EntityA;

EntityA::EntityA(double a, double b, double c) :
    a_(a), b_(b), c_(c) {}
EntityA::~EntityA(){}

EntityA EntityA::normalize_inplace(){
    const double tot = std::sqrt(a_*a_ + b_*b_ + c_*c_);
    a_ /= tot;
    b_ /= tot;
    c_ /= tot;
    return EntityA(1.0, 1.0, 1.0);
}

EntityA EntityA::operator+(EntityA other) {
    return EntityA(a_ + other.a_, b_ + other.b_, c_ + other.c_);
}
There are many more linking errors like that one; I think nothing gets linked correctly at all.
Also, I used a Makefile previously and all linking worked correctly.

terraform v0.12.12 passing data between modules - How Is This Supposed To Work?

I'm struggling to understand the way data is passed in and out of modules in Terraform (v0.12.12). I have what I think is a very simple example, but can't understand how data is supposed to pass between modules. And most of the examples I can find are either incomplete or out-of-date.
I've created a simple example with two modules. A network module that creates a vpc and a subnet, and a compute module that creates an EC2 instance. I'm simply trying to provide the compute module with the id of the subnet where the EC2 instance should go. But I don't understand:
how do I get the subnet_id out of the network module that creates the subnet so other modules can use it?
How do I get the compute module to use the subnet_id?
The basic structure is as follows
.
├── main.tf
└── modules
    ├── compute
    │   └── main.tf
    └── network
        ├── main.tf
        └── output.tf
# main.tf
provider "aws" {
  region = "eu-west-1"
}

module "m_network" {
  source = "./modules/network"
}

# The problem is how to make that subnet id available to the compute module
# so the ec2 instance can be added to it?
module "m_compute" {
  source = "./modules/compute"
  # I wondered if the m_compute module should pass in a parameter, but
  # any attempt to pass a parameter gives an error: An argument "subnet_id" is not expected here.
  #xxx = "xxx" # This fails too.
  # subnet_id = module.m_network.subnet_id
}

# modules/network/main.tf
resource "aws_vpc" "myvpc" {
  cidr_block = "10.0.0.0/16"
}

# Create subnets in each availability zone to launch our instances into, each with address blocks within the VPC:
resource "aws_subnet" "myvpc_subnet" {
  vpc_id     = "${aws_vpc.myvpc.id}"
  cidr_block = "10.0.1.0/24"
}

# modules/network/output.tf
# Generates subnet attributes that can be passed to other modules
output "myvpc_subnet_id" {
  description = "Subnet ID"
  value       = "${aws_subnet.myvpc_subnet.id}"
}

# modules/compute/main.tf
resource "aws_instance" "app" {
  ami           = "ami-13be557e"
  instance_type = "t2.micro"
  subnet_id     = aws_subnet.myvpc_subnet_id # What should go here?
}
You need to add a variables.tf file to your compute module, so that it can receive the subnet_id from the network module.
Check the variables.tf file content and the main.tf of the compute module below to see how to access the input variable.
The structure of an example should be as follows.
.
├── main.tf
└── modules
    ├── compute
    │   ├── main.tf
    │   └── variables.tf
    └── network
        ├── main.tf
        └── output.tf
Then inside each file you can do something like this.
# main.tf
provider "aws" {
  region = "eu-west-1"
}

# Call network module and receive output
module "m_network" {
  source = "./modules/network"
}

module "m_compute" {
  source = "./modules/compute"
  # pass the output of the network module
  # as input variables for the compute module
  subnet_id = module.m_network.output_subnet_id
}

# compute module | variables.tf
# declare an input variable for the compute module
variable "subnet_id" {
  description = "The subnet ID from the network module"
  # You can also enforce the type with
  # type = string OR number OR etc.
}

# compute module | main.tf
resource "aws_instance" "app" {
  ami           = "ami-13be557e"
  instance_type = "t2.micro"
  # you can use variables with var.{name}
  # access the subnet id variable
  subnet_id     = var.subnet_id
}

# network module | main.tf
resource "aws_vpc" "myvpc" {
  cidr_block = "10.0.0.0/16"
}

resource "aws_subnet" "myvpc_subnet" {
  vpc_id     = "${aws_vpc.myvpc.id}"
  cidr_block = "10.0.1.0/24"
}

# network module | output.tf
output "output_subnet_id" {
  description = "Subnet ID"
  value       = "${aws_subnet.myvpc_subnet.id}"
}

Upload folder with sub-folders and files on S3 using python

I have a folder with a bunch of subfolders and files which I am fetching from a server and assigning to a variable. The folder structure is as follows:
└── main_folder
    ├── folder
    │   ├── folder
    │   │   ├── folder
    │   │   │   └── a.json
    │   │   ├── folder
    │   │   │   ├── folder
    │   │   │   │   └── b.json
    │   │   │   ├── folder
    │   │   │   │   └── c.json
    │   │   │   └── folder
    │   │   │       └── d.json
    │   │   └── folder
    │   │       └── e.json
    │   ├── folder
    │   │   └── f.json
    │   └── folder
    │       └── i.json
Now I want to upload this main_folder to an S3 bucket with the same structure using boto3. In boto3 there is no way to upload a folder to S3.
I have seen the solutions in the links below, but they fetch the files from the local machine, whereas I am fetching the data from a server and assigning it to a variable.
Uploading a folder full of files to a specific folder in Amazon S3
upload a directory to s3 with boto
https://gist.github.com/feelinc/d1f541af4f31d09a2ec3
Has anybody faced the same type of issue?
Below is code that works for me, pure python3.
""" upload one directory from the current working directory to aws """
from pathlib import Path
import os
import glob
import boto3


def upload_dir(localDir, awsInitDir, bucketName, tag, prefix='/'):
    """
    from current working directory, upload a 'localDir' with all its subcontents (files and subdirectories...)
    to a aws bucket
    Parameters
    ----------
    localDir :   localDirectory to be uploaded, with respect to current working directory
    awsInitDir : prefix 'directory' in aws
    bucketName : bucket in aws
    tag :        tag to select files, like *png
                 NOTE: if you use tag it must be given like --tag '*txt', in some quotation marks... for argparse
    prefix :     to remove initial '/' from file names
    Returns
    -------
    None
    """
    s3 = boto3.resource('s3')
    cwd = str(Path.cwd())
    p = Path(os.path.join(Path.cwd(), localDir))
    mydirs = list(p.glob('**'))
    for mydir in mydirs:
        fileNames = glob.glob(os.path.join(mydir, tag))
        fileNames = [f for f in fileNames if not Path(f).is_dir()]
        rows = len(fileNames)
        for i, fileName in enumerate(fileNames):
            fileName = str(fileName).replace(cwd, '')
            if fileName.startswith(prefix):  # only modify the text if it starts with the prefix
                fileName = fileName.replace(prefix, "", 1)  # remove one instance of prefix
            print(f"fileName {fileName}")
            awsPath = os.path.join(awsInitDir, str(fileName))
            s3.meta.client.upload_file(fileName, bucketName, awsPath)


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--localDir", help="which dir to upload to aws")
    parser.add_argument("--bucketName", help="to which bucket to upload in aws")
    parser.add_argument("--awsInitDir", help="to which 'directory' in aws")
    parser.add_argument("--tag", help="some tag to select files, like *png", default='*')
    args = parser.parse_args()

    # cd whatever is above your dir, then run it
    # (below assuming this script is in ~/git/hu-libraries/netRoutines/uploadDir2Aws.py )
    # in the example below you have directory structure ~/Downloads/IO
    # you copy full directory of ~/Downloads/IO to aws bucket markus1 to 'directory' 2020/IO
    # NOTE: if you use tag it must be given like --tag '*txt', in some quotation marks...
    # cd ~/Downloads
    # python ~/git/hu-libraries/netRoutines/uploadDir2Aws.py --localDir IO --bucketName markus1 --awsInitDir 2020
    upload_dir(localDir=args.localDir, bucketName=args.bucketName,
               awsInitDir=args.awsInitDir, tag=args.tag)
I had to solve this problem myself, so thought I would include a snippet of my code here.
I also had the requirement to filter for specific file types, and upload the directory contents only (vs the directory itself).
import logging
from pathlib import Path
from typing import Union

import boto3

log = logging.getLogger(__name__)


# Note: this is a method of a small wrapper class (not shown here), which also
# provides the self.upload_file() helper used below.
def upload_dir(
    self,
    local_dir: Union[str, Path],
    s3_path: str = "/",
    file_type: str = "",
    contents_only: bool = False,
) -> dict:
    """
    Upload the content of a local directory to a bucket path.

    Args:
        local_dir (Union[str, Path]): Directory to upload files from.
        s3_path (str, optional): The path within the bucket to upload to.
            If omitted, the bucket root is used.
        file_type (str, optional): Upload files with extension only, e.g. txt.
        contents_only (bool): Used to copy only the directory contents to the
            specified path, not the directory itself.

    Returns:
        dict: key:value pair of file_name:upload_status.
            upload_status True if uploaded, False if failed.
    """
    resource = boto3.resource(
        "s3",
        aws_access_key_id="xxx",
        aws_secret_access_key="xxx",
        endpoint_url="xxx",
        region_name="xxx",
    )
    status_dict = {}

    local_dir_path = Path(local_dir).resolve()
    log.debug(f"Directory to upload: {local_dir_path}")
    all_subdirs = local_dir_path.glob("**")
    for dir_path in all_subdirs:
        log.debug(f"Searching for files in directory: {dir_path}")
        file_names = dir_path.glob(f"*{('.' + file_type) if file_type else ''}")
        # Only return valid files
        file_names = [f for f in file_names if f.is_file()]
        log.debug(f"Files found: {list(file_names)}")

        for _, file_name in enumerate(file_names):
            s3_key = str(Path(s3_path) / file_name.relative_to(
                local_dir_path if contents_only else local_dir_path.parent
            ))
            log.debug(f"S3 key to upload: {s3_key}")
            status_dict[str(file_name)] = self.upload_file(s3_key, file_name)

    return status_dict
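The method above references self.upload_file(), which is not included in the snippet. A minimal sketch of what such a wrapper might look like (the S3Uploader class name and the error handling are assumptions, not part of the original answer):

import logging
from pathlib import Path

import boto3

log = logging.getLogger(__name__)


class S3Uploader:
    """Hypothetical wrapper class that the upload_dir() method above could live on."""

    def __init__(self, bucket_name: str):
        # Keep a Bucket handle so individual files can be uploaded by key.
        self.bucket = boto3.resource("s3").Bucket(bucket_name)

    def upload_file(self, s3_key: str, file_name: Path) -> bool:
        """Upload a single file; return True/False to match the status dict semantics above."""
        try:
            self.bucket.upload_file(str(file_name), s3_key)
            return True
        except Exception:
            log.exception(f"Failed to upload {file_name} to {s3_key}")
            return False

With that in place, something like S3Uploader("my-bucket").upload_dir("main_folder", file_type="json") would upload every .json file under main_folder, keyed by its path relative to the parent directory.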