diff --git a/.gitignore b/.gitignore index 347d0a1289..c3b5a993bc 100644 --- a/.gitignore +++ b/.gitignore @@ -60,3 +60,4 @@ logs/ **/generated-sources/ /distribution /vault* +/airavata-api/distribution/ diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppEnvironmentEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppEnvironmentEntity.java index 14fa6f3f6d..c0bb5bda7c 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppEnvironmentEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppEnvironmentEntity.java @@ -34,7 +34,7 @@ public class AppEnvironmentEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "DEPLOYMENT_ID") + @Column(name = "DEPLOYMENT_ID", insertable = false, updatable = false) private String deploymentId; @Column(name = "VALUE") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppModuleMappingEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppModuleMappingEntity.java index ede77fe110..aaf2d7ddf1 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppModuleMappingEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/AppModuleMappingEntity.java @@ -33,11 +33,11 @@ public class AppModuleMappingEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "INTERFACE_ID") + @Column(name = "INTERFACE_ID", insertable = false, updatable = false) private String interfaceId; @Id - @Column(name = "MODULE_ID") + @Column(name = "MODULE_ID", insertable = false, updatable = false) private String moduleId; @ManyToOne(targetEntity = ApplicationInterfaceEntity.class) diff --git 
a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java index c471c8dfcd..b45dcdc735 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java @@ -36,7 +36,7 @@ public class ApplicationInputEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "INTERFACE_ID") + @Column(name = "INTERFACE_ID", insertable = false, updatable = false) private String interfaceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationOutputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationOutputEntity.java index c47011f44a..682141d4c1 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationOutputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationOutputEntity.java @@ -35,7 +35,7 @@ public class ApplicationOutputEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "INTERFACE_ID") + @Column(name = "INTERFACE_ID", insertable = false, updatable = false) private String interfaceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java index a7222621b7..ec892b0e9f 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java +++ 
b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java @@ -32,7 +32,7 @@ public class BatchQueueEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "COMPUTE_RESOURCE_ID") + @Column(name = "COMPUTE_RESOURCE_ID", insertable = false, updatable = false) private String computeResourceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueResourcePolicyEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueResourcePolicyEntity.java index fec67cd7d7..ab59b20e75 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueResourcePolicyEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueResourcePolicyEntity.java @@ -45,7 +45,7 @@ public class BatchQueueResourcePolicyEntity implements Serializable { @Column(name = "COMPUTE_RESOURCE_ID") private String computeResourceId; - @Column(name = "GROUP_RESOURCE_PROFILE_ID") + @Column(name = "GROUP_RESOURCE_PROFILE_ID", insertable = false, updatable = false) private String groupResourceProfileId; @Column(name = "QUEUE_NAME") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceEntity.java index d77db7d873..cb856d74e2 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceEntity.java @@ -33,10 +33,11 @@ public class ComputeResourceEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "RESOURCE_ID") + @Column(name = "RESOURCE_ID", nullable = false, length = 
255) private String computeResourceId; - @Column(name = "CREATION_TIME") + @Column(name = "CREATION_TIME", nullable = false) + @Temporal(TemporalType.TIMESTAMP) private Timestamp creationTime; @Column(name = "ENABLED") @@ -49,9 +50,9 @@ public class ComputeResourceEntity implements Serializable { private String gatewayUsageModuleLoadCommand; @Column(name = "GATEWAY_USAGE_REPORTING") - private boolean gatewayUsageReporting; + private Boolean gatewayUsageReporting; - @Column(name = "HOST_NAME") + @Column(name = "HOST_NAME", nullable = false, length = 255) private String hostName; @Column(name = "MAX_MEMORY_NODE") @@ -60,7 +61,8 @@ public class ComputeResourceEntity implements Serializable { @Column(name = "RESOURCE_DESCRIPTION") private String resourceDescription; - @Column(name = "UPDATE_TIME") + @Column(name = "UPDATE_TIME", nullable = false) + @Temporal(TemporalType.TIMESTAMP) private Timestamp updateTime; @Column(name = "CPUS_PER_NODE") @@ -75,13 +77,21 @@ public class ComputeResourceEntity implements Serializable { @Column(name = "DEFAULT_WALLTIME") private Integer defaultWalltime; - @ElementCollection(fetch = FetchType.EAGER) - @CollectionTable(name = "HOST_ALIAS", joinColumns = @JoinColumn(name = "RESOURCE_ID")) + @ElementCollection(fetch = FetchType.LAZY) + @CollectionTable( + name = "HOST_ALIAS", + joinColumns = @JoinColumn(name = "RESOURCE_ID"), + foreignKey = @ForeignKey(name = "host_alias_ibfk_1") + ) @Column(name = "ALIAS") private List hostAliases; - @ElementCollection(fetch = FetchType.EAGER) - @CollectionTable(name = "HOST_IPADDRESS", joinColumns = @JoinColumn(name = "RESOURCE_ID")) + @ElementCollection(fetch = FetchType.LAZY) + @CollectionTable( + name = "HOST_IPADDRESS", + joinColumns = @JoinColumn(name = "RESOURCE_ID"), + foreignKey = @ForeignKey(name = "host_ipaddress_ibfk_1") + ) @Column(name = "IP_ADDRESS") private List ipAddresses; @@ -89,21 +99,21 @@ public class ComputeResourceEntity implements Serializable { targetEntity = 
BatchQueueEntity.class, cascade = CascadeType.ALL, mappedBy = "computeResource", - fetch = FetchType.EAGER) + fetch = FetchType.LAZY) private List batchQueues; @OneToMany( targetEntity = JobSubmissionInterfaceEntity.class, cascade = CascadeType.ALL, mappedBy = "computeResource", - fetch = FetchType.EAGER) + fetch = FetchType.LAZY) private List jobSubmissionInterfaces; @OneToMany( targetEntity = DataMovementInterfaceEntity.class, cascade = CascadeType.ALL, mappedBy = "computeResource", - fetch = FetchType.EAGER) + fetch = FetchType.LAZY) private List dataMovementInterfaces; public ComputeResourceEntity() {} @@ -132,11 +142,11 @@ public void setGatewayUsageExecutable(String gatewayUsageExecutable) { this.gatewayUsageExecutable = gatewayUsageExecutable; } - public boolean isGatewayUsageReporting() { + public Boolean isGatewayUsageReporting() { return gatewayUsageReporting; } - public void setGatewayUsageReporting(boolean gatewayUsageReporting) { + public void setGatewayUsageReporting(Boolean gatewayUsageReporting) { this.gatewayUsageReporting = gatewayUsageReporting; } diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceFileSystemEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceFileSystemEntity.java index 84ff4dfe6a..659134fa0f 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceFileSystemEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourceFileSystemEntity.java @@ -33,7 +33,7 @@ public class ComputeResourceFileSystemEntity implements Serializable { private static final long serialVersionUID = 1L; - @Column(name = "COMPUTE_RESOURCE_ID") + @Column(name = "COMPUTE_RESOURCE_ID", insertable = false, updatable = false) @Id private String computeResourceId; diff --git 
a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePolicyEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePolicyEntity.java index 85b41062e9..629c8ad04a 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePolicyEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePolicyEntity.java @@ -49,7 +49,7 @@ public class ComputeResourcePolicyEntity implements Serializable { @Column(name = "COMPUTE_RESOURCE_ID") private String computeResourceId; - @Column(name = "GROUP_RESOURCE_PROFILE_ID") + @Column(name = "GROUP_RESOURCE_PROFILE_ID", insertable = false, updatable = false) private String groupResourceProfileId; // TODO: Store COMPUTE_RESOURCE_ID and QUEUE_NAME in table so it can FK to BATCH_QUEUE diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePreferenceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePreferenceEntity.java index 0a316359d8..d7c48b296e 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePreferenceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ComputeResourcePreferenceEntity.java @@ -35,7 +35,7 @@ public class ComputeResourcePreferenceEntity implements Serializable { private static final long serialVersionUID = 1L; - @Column(name = "GATEWAY_ID") + @Column(name = "GATEWAY_ID", insertable = false, updatable = false) @Id private String gatewayId; diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/DataMovementInterfaceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/DataMovementInterfaceEntity.java index 21e1f2a038..5473a24f9f 
100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/DataMovementInterfaceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/DataMovementInterfaceEntity.java @@ -33,7 +33,7 @@ public class DataMovementInterfaceEntity implements Serializable { private static final long serialVersionUID = 1L; - @Column(name = "COMPUTE_RESOURCE_ID") + @Column(name = "COMPUTE_RESOURCE_ID", insertable = false, updatable = false) @Id private String computeResourceId; diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GridftpEndpointEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GridftpEndpointEntity.java index add631d206..e1b8b198c4 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GridftpEndpointEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GridftpEndpointEntity.java @@ -33,7 +33,7 @@ public class GridftpEndpointEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "DATA_MOVEMENT_INTERFACE_ID") + @Column(name = "DATA_MOVEMENT_INTERFACE_ID", insertable = false, updatable = false) private String dataMovementInterfaceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupComputeResourcePrefEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupComputeResourcePrefEntity.java index 7c0cd30d5f..82b0ae34ef 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupComputeResourcePrefEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupComputeResourcePrefEntity.java @@ -55,7 +55,7 @@ public abstract class GroupComputeResourcePrefEntity implements Serializable { @Id private String 
computeResourceId; - @Column(name = "GROUP_RESOURCE_PROFILE_ID") + @Column(name = "GROUP_RESOURCE_PROFILE_ID", insertable = false, updatable = false) @Id private String groupResourceProfileId; diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupSSHAccountProvisionerConfig.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupSSHAccountProvisionerConfig.java index b117df577e..1e1b9d1ba3 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupSSHAccountProvisionerConfig.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/GroupSSHAccountProvisionerConfig.java @@ -42,11 +42,11 @@ public class GroupSSHAccountProvisionerConfig implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "RESOURCE_ID") + @Column(name = "RESOURCE_ID", insertable = false, updatable = false) private String resourceId; @Id - @Column(name = "GROUP_RESOURCE_PROFILE_ID") + @Column(name = "GROUP_RESOURCE_PROFILE_ID", insertable = false, updatable = false) private String groupResourceProfileId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobManagerCommandEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobManagerCommandEntity.java index d4c5256d91..9113dbe678 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobManagerCommandEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobManagerCommandEntity.java @@ -33,7 +33,7 @@ public class JobManagerCommandEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "RESOURCE_JOB_MANAGER_ID") + @Column(name = "RESOURCE_JOB_MANAGER_ID", insertable = false, updatable = false) private String resourceJobManagerId; @Id diff 
--git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobSubmissionInterfaceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobSubmissionInterfaceEntity.java index 6bea6a63c7..40500d68c7 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobSubmissionInterfaceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/JobSubmissionInterfaceEntity.java @@ -33,7 +33,7 @@ public class JobSubmissionInterfaceEntity implements Serializable { private static final long serialVersionUID = 1L; - @Column(name = "COMPUTE_RESOURCE_ID") + @Column(name = "COMPUTE_RESOURCE_ID", insertable = false, updatable = false) @Id private String computeResourceId; diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryApendPathEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryApendPathEntity.java index 8afe6efbf3..262bc46929 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryApendPathEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryApendPathEntity.java @@ -34,7 +34,7 @@ public class LibraryApendPathEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "DEPLOYMENT_ID") + @Column(name = "DEPLOYMENT_ID", insertable = false, updatable = false) private String deploymentId; @Column(name = "VALUE") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryPrependPathEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryPrependPathEntity.java index 7c61c0d072..d573279dda 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryPrependPathEntity.java +++ 
b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LibraryPrependPathEntity.java @@ -35,7 +35,7 @@ public class LibraryPrependPathEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "DEPLOYMENT_ID") + @Column(name = "DEPLOYMENT_ID", insertable = false, updatable = false) private String deploymentId; @Column(name = "VALUE") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LocalSubmissionEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LocalSubmissionEntity.java index 9b7b62a20c..d7df4bf3d9 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LocalSubmissionEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/LocalSubmissionEntity.java @@ -42,7 +42,7 @@ public class LocalSubmissionEntity implements Serializable { @Column(name = "UPDATE_TIME") private Timestamp updateTime; - @Column(name = "RESOURCE_JOB_MANAGER_ID") + @Column(name = "RESOURCE_JOB_MANAGER_ID", insertable = false, updatable = false) private String resourceJobManagerId; @Column(name = "SECURITY_PROTOCOL") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ModuleLoadCmdEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ModuleLoadCmdEntity.java index fae4d7087e..05c700cf0a 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ModuleLoadCmdEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ModuleLoadCmdEntity.java @@ -34,7 +34,7 @@ public class ModuleLoadCmdEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "APP_DEPLOYMENT_ID") + @Column(name = "APP_DEPLOYMENT_ID", insertable = false, updatable = false) private String 
appdeploymentId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParallelismCommandEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParallelismCommandEntity.java index be4b15d7cf..97afa83921 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParallelismCommandEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParallelismCommandEntity.java @@ -33,7 +33,7 @@ public class ParallelismCommandEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "RESOURCE_JOB_MANAGER_ID") + @Column(name = "RESOURCE_JOB_MANAGER_ID", insertable = false, updatable = false) private String resourceJobManagerId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorEntity.java index 8147e238ba..7d691c8c2c 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorEntity.java @@ -32,13 +32,13 @@ public class ParserConnectorEntity implements Serializable { @Column(name = "PARSER_CONNECTOR_ID") private String id; - @Column(name = "PARENT_PARSER_ID") + @Column(name = "PARENT_PARSER_ID", insertable = false, updatable = false) private String parentParserId; - @Column(name = "CHILD_PARSER_ID") + @Column(name = "CHILD_PARSER_ID", insertable = false, updatable = false) private String childParserId; - @Column(name = "PARSING_TEMPLATE_ID") + @Column(name = "PARSING_TEMPLATE_ID", insertable = false, updatable = false) private String parsingTemplateId; @OneToMany( diff --git 
a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorInputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorInputEntity.java index 2a758cdb4e..243ca7ad0a 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorInputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserConnectorInputEntity.java @@ -31,16 +31,16 @@ public class ParserConnectorInputEntity implements Serializable { @Column(name = "PARSER_CONNECTOR_INPUT_ID") private String id; - @Column(name = "PARSER_INPUT_ID") + @Column(name = "PARSER_INPUT_ID", insertable = false, updatable = false) private String inputId; - @Column(name = "PARSER_OUTPUT_ID") + @Column(name = "PARSER_OUTPUT_ID", insertable = false, updatable = false) private String parentOutputId; @Column(name = "VALUE") private String value; - @Column(name = "PARSER_CONNECTOR_ID") + @Column(name = "PARSER_CONNECTOR_ID", insertable = false, updatable = false) private String parserConnectorId; @ManyToOne(targetEntity = ParserInputEntity.class, cascade = CascadeType.MERGE) diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserInputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserInputEntity.java index e37e4c2405..ba1dbf9a27 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserInputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserInputEntity.java @@ -37,7 +37,7 @@ public class ParserInputEntity implements Serializable { @Column(name = "PARSER_INPUT_REQUIRED") private boolean requiredInput; - @Column(name = "PARSER_ID") + @Column(name = "PARSER_ID", insertable = false, updatable = false) private String parserId; @ManyToOne(targetEntity = 
ParserEntity.class, cascade = CascadeType.MERGE) diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserOutputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserOutputEntity.java index 63c2fc0a05..b1a790afc2 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserOutputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParserOutputEntity.java @@ -37,7 +37,7 @@ public class ParserOutputEntity implements Serializable { @Column(name = "PARSER_OUTPUT_REQUIRED") private boolean requiredOutput; - @Column(name = "PARSER_ID") + @Column(name = "PARSER_ID", insertable = false, updatable = false) private String parserId; @ManyToOne(targetEntity = ParserEntity.class, cascade = CascadeType.MERGE) diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParsingTemplateInputEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParsingTemplateInputEntity.java index d538299fa7..d2766b5b1b 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParsingTemplateInputEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ParsingTemplateInputEntity.java @@ -31,7 +31,7 @@ public class ParsingTemplateInputEntity implements Serializable { @Column(name = "PARSING_TEMPLATE_INPUT_ID") private String id; - @Column(name = "TARGET_PARSER_INPUT_ID") + @Column(name = "TARGET_PARSER_INPUT_ID", insertable = false, updatable = false) private String targetInputId; @Column(name = "APPLICATION_OUTPUT_NAME") @@ -40,7 +40,7 @@ public class ParsingTemplateInputEntity implements Serializable { @Column(name = "VALUE") private String value; - @Column(name = "PARSING_TEMPLATE_ID") + @Column(name = "PARSING_TEMPLATE_ID", insertable = false, updatable = false) private 
String parsingTemplateId; @ManyToOne(targetEntity = ParserInputEntity.class, cascade = CascadeType.MERGE) diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PostjobCommandEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PostjobCommandEntity.java index bf330dfcc8..1930cbfbe7 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PostjobCommandEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PostjobCommandEntity.java @@ -34,7 +34,7 @@ public class PostjobCommandEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "APPDEPLOYMENT_ID") + @Column(name = "APPDEPLOYMENT_ID", insertable = false, updatable = false) private String appdeploymentId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PrejobCommandEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PrejobCommandEntity.java index 81b0b51076..f99895d1e6 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PrejobCommandEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/PrejobCommandEntity.java @@ -34,7 +34,7 @@ public class PrejobCommandEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "APPDEPLOYMENT_ID") + @Column(name = "APPDEPLOYMENT_ID", insertable = false, updatable = false) private String appdeploymentId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SSHAccountProvisionerConfiguration.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SSHAccountProvisionerConfiguration.java index 420d83570b..c1c6a171d0 100644 --- 
a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SSHAccountProvisionerConfiguration.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SSHAccountProvisionerConfiguration.java @@ -30,11 +30,11 @@ @IdClass(SSHAccountProvisionerConfigurationPK.class) public class SSHAccountProvisionerConfiguration { @Id - @Column(name = "GATEWAY_ID") + @Column(name = "GATEWAY_ID", insertable = false, updatable = false) private String gatewayId; @Id - @Column(name = "RESOURCE_ID") + @Column(name = "RESOURCE_ID", insertable = false, updatable = false) private String resourceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SshJobSubmissionEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SshJobSubmissionEntity.java index de83512076..1f956e508e 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SshJobSubmissionEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/SshJobSubmissionEntity.java @@ -38,6 +38,9 @@ public class SshJobSubmissionEntity implements Serializable { @Column(name = "JOB_SUBMISSION_INTERFACE_ID") private String jobSubmissionInterfaceId; + @Column(name = "RESOURCE_JOB_MANAGER_ID", insertable = false, updatable = false) + private String resourceJobManagerId; + @ManyToOne(cascade = CascadeType.MERGE) @JoinColumn(name = "RESOURCE_JOB_MANAGER_ID", nullable = false, updatable = false) private ResourceJobManagerEntity resourceJobManager; @@ -71,6 +74,14 @@ public void setJobSubmissionInterfaceId(String jobSubmissionInterfaceId) { this.jobSubmissionInterfaceId = jobSubmissionInterfaceId; } + public String getResourceJobManagerId() { + return resourceJobManagerId; + } + + public void setResourceJobManagerId(String resourceJobManagerId) { + this.resourceJobManagerId = resourceJobManagerId; + } + public String 
getAlternativeSshHostname() { return alternativeSshHostname; } diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageInterfaceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageInterfaceEntity.java index 60cff0bbaa..8ca150ccd3 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageInterfaceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageInterfaceEntity.java @@ -34,7 +34,7 @@ public class StorageInterfaceEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "STORAGE_RESOURCE_ID") + @Column(name = "STORAGE_RESOURCE_ID", insertable = false, updatable = false) private String storageResourceId; @Id diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StoragePreferenceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StoragePreferenceEntity.java index e0a53b33df..3f3ed6c2de 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StoragePreferenceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StoragePreferenceEntity.java @@ -31,7 +31,7 @@ public class StoragePreferenceEntity implements Serializable { private static final long serialVersionUID = 1L; - @Column(name = "GATEWAY_ID") + @Column(name = "GATEWAY_ID", insertable = false, updatable = false) @Id private String gatewayId; diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageResourceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageResourceEntity.java index c17d489dac..9e5aa03c73 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageResourceEntity.java 
+++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/StorageResourceEntity.java @@ -33,10 +33,11 @@ public class StorageResourceEntity implements Serializable { private static final long serialVersionUID = 1L; @Id - @Column(name = "STORAGE_RESOURCE_ID") + @Column(name = "STORAGE_RESOURCE_ID", nullable = false, length = 255) private String storageResourceId; - @Column(name = "CREATION_TIME") + @Column(name = "CREATION_TIME", nullable = false) + @Temporal(TemporalType.TIMESTAMP) private Timestamp creationTime; @Column(name = "DESCRIPTION") @@ -45,17 +46,18 @@ public class StorageResourceEntity implements Serializable { @Column(name = "ENABLED") private boolean enabled; - @Column(name = "HOST_NAME") + @Column(name = "HOST_NAME", nullable = false, length = 255) private String hostName; - @Column(name = "UPDATE_TIME") + @Column(name = "UPDATE_TIME", nullable = false) + @Temporal(TemporalType.TIMESTAMP) private Timestamp updateTime; @OneToMany( targetEntity = StorageInterfaceEntity.class, cascade = CascadeType.ALL, mappedBy = "storageResource", - fetch = FetchType.EAGER) + fetch = FetchType.LAZY) private List dataMovementInterfaces; public StorageResourceEntity() {} diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserComputeResourcePreferenceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserComputeResourcePreferenceEntity.java index 246a8394ea..f66da07b5d 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserComputeResourcePreferenceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserComputeResourcePreferenceEntity.java @@ -36,11 +36,11 @@ public class UserComputeResourcePreferenceEntity { private String computeResourceId; @Id - @Column(name = "USER_ID") + @Column(name = "USER_ID", insertable = false, updatable = false) private String userId; @Id - 
@Column(name = "GATEWAY_ID") + @Column(name = "GATEWAY_ID", insertable = false, updatable = false) private String gatewayId; @Column(name = "PREFERED_BATCH_QUEUE") diff --git a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserStoragePreferenceEntity.java b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserStoragePreferenceEntity.java index e3ae98ca6d..08700f395c 100644 --- a/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserStoragePreferenceEntity.java +++ b/airavata-api/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UserStoragePreferenceEntity.java @@ -35,11 +35,11 @@ public class UserStoragePreferenceEntity { private String storageResourceId; @Id - @Column(name = "USER_ID") + @Column(name = "USER_ID", insertable = false, updatable = false) private String userId; @Id - @Column(name = "GATEWAY_ID") + @Column(name = "GATEWAY_ID", insertable = false, updatable = false) private String gatewayId; @Column(name = "RESOURCE_CS_TOKEN") diff --git a/modules/research-framework/research-service/README.md b/modules/research-framework/research-service/README.md index 79c587882c..0102ef6c6a 100644 --- a/modules/research-framework/research-service/README.md +++ b/modules/research-framework/research-service/README.md @@ -17,24 +17,434 @@ under the License. --> -# Research Service Application +# Apache Airavata Research Service -This Spring Boot application supports different profiles for running in production vs development mode. In production mode, a security filter enforces authentication. In development mode, the security filter is bypassed for easier local testing. +A comprehensive Spring Boot REST API service for managing research resources, computational infrastructure, and research workflows. This service provides a unified interface for researchers to discover, manage, and utilize computational resources and research artifacts. 
-## Running in Development Mode +## 🏗️ Architecture Overview -### Using Maven +The Research Service employs a **dual database architecture** designed to separate research data from infrastructure management: + +- **H2 Database (In-Memory)**: Manages v1 research resources (Projects, Datasets, Models, Notebooks, Repositories) +- **MariaDB Database**: Manages v2 infrastructure resources (Compute Resources, Storage Resources) using imported airavata-api entities +- **RESTful API**: Comprehensive v1 and v2 endpoints with different authentication requirements +- **Multi-Profile Configuration**: Supports development and production environments + +### Key Components + +- **Layered Architecture**: Controllers → Handlers/Services → Repositories → Entities +- **Authentication**: JWT + API Key dual authentication system +- **Data Conversion**: Advanced DTO-Entity mapping with JSON serialization for UI fields +- **Auto-Initialization**: Automated sample data seeding for development + +## 🚀 Quick Start + +### 1. Prerequisites + +- Java 17+ +- Maven 3.6+ +- Docker & Docker Compose +- MariaDB client (for migrations) + +### 2. Start Airavata Database Stack + +```bash +# From the root of your local airavata checkout +cd /path/to/airavata + +# Add hostname mapping (one-time setup) +echo "127.0.0.1 airavata.host" | sudo tee -a /etc/hosts + +# Start MariaDB + Adminer web interface +docker-compose -f .devcontainer/docker-compose.yml up db adminer +``` + +**Database Access:** +- **Host**: `airavata.host:13306` +- **Username**: `airavata` +- **Password**: `123456` +- **Database**: `app_catalog` +- **Web Admin**: http://localhost:18088 + +### 3. Apply Database Migrations + +```bash +cd airavata/modules/research-framework/research-service + +# Run column length migration (REQUIRED for UI field JSON storage) +# This migration increases column lengths in airavata-api entities to support +# JSON serialization of UI-specific fields like queues, hostAliases, etc. 
+mysql -h airavata.host -P 13306 -u airavata -p123456 app_catalog < database-migrations/001-increase-description-column-lengths.sql +``` + +### 4. Start Research Service ```bash +# Development mode (includes sample data) mvn spring-boot:run -Dspring-boot.run.profiles=dev + +# Production mode +mvn spring-boot:run +``` + +### 5. Access the Service + +- **API Base URL**: http://localhost:8080 +- **Swagger UI**: http://localhost:8080/swagger-ui.html +- **H2 Console**: http://localhost:8080/h2-console +- **Health Check**: http://localhost:8080/actuator/health + +## ๐Ÿ“Š Database Architecture + +### Dual Database System + +#### H2 Database (v1 Resources) +- **Purpose**: Research-focused entities and sample data +- **Location**: In-memory (`jdbc:h2:mem:testdb`) +- **Entities**: `Resource`, `Project`, `ResourceStar`, `Tag`, `Session` +- **Resource Types**: `DatasetResource`, `ModelResource`, `NotebookResource`, `RepositoryResource` +- **Sample Data**: 39+ resources across neuroscience research projects + +#### MariaDB Database (v2 Infrastructure) +- **Purpose**: Production infrastructure and computational resources +- **Location**: `airavata.host:13306/app_catalog` +- **Entities**: `ComputeResourceEntity`, `StorageResourceEntity` (imported from airavata-api) +- **Resource Types**: HPC clusters, supercomputers, cloud resources, storage systems +- **Sample Data**: 12+ infrastructure resources + +### Data Initializers + +- **`DatasetInitializer`**: Creates 9 research datasets (all profiles) +- **`DevDataInitializer`**: Creates 10 neuroscience projects with full resource sets (dev profile only) + +## ๐Ÿ” Authentication + +### JWT Authentication (Users) +```bash +# Headers for authenticated requests +Authorization: Bearer +X-Claims: {"userName":"user@domain.com","gatewayID":"default"} +``` + +### API Key Authentication (Services) +```bash +# Headers for service requests +X-API-Key: dev-research-api-key-12345 +``` + +### Development Token Generation +```bash +# Generate test 
JWT +curl -X POST http://localhost:8080/api/dev/auth/token \ + -H "Content-Type: application/json" \ + -d '{"email":"test@example.com","name":"Test User"}' +``` + +## ๐ŸŒ API Endpoints + +### V1 API - Research Resources (H2 Database) + +#### Projects (`/api/v1/rf/projects`) +- `GET /` - List all projects +- `GET /{ownerId}` - Get projects by owner +- `POST /` - Create new project +- `DELETE /{projectId}` - Delete project + +#### Resources (`/api/v1/rf/resources`) +- `GET /public` - List all public resources (with pagination, filtering) +- `GET /public/{id}` - Get resource by ID +- `GET /public/tags/all` - Get all tags sorted by popularity +- `GET /search` - Search resources by type and name +- `POST /dataset` - Create dataset resource +- `POST /notebook` - Create notebook resource +- `POST /repository` - Create repository resource +- `POST /model` - Create model resource +- `PATCH /repository` - Modify repository resource +- `DELETE /{id}` - Delete resource +- `POST /{id}/star` - Star/unstar resource +- `GET /{id}/star` - Check star status +- `GET /resources/{id}/count` - Get star count +- `GET /{userId}/stars` - Get user's starred resources + +#### Sessions (`/api/v1/rf/sessions`) +- `GET /` - List user sessions (with status filtering) +- `PATCH /{sessionId}` - Update session status +- `DELETE /{sessionId}` - Delete session + +#### Research Hub (`/api/v1/rf/hub`) +- `GET /start/project/{projectId}` - Start hub session for project +- `GET /resume/session/{sessionId}` - Resume existing session + +### V2 API - Infrastructure Resources (MariaDB) + +#### Compute Resources (`/api/v2/rf/compute-resources`) ๐Ÿ”’ +- `GET /` - List compute resources (with name search) +- `GET /{id}` - Get compute resource by ID +- `POST /` - Create compute resource +- `PUT /{id}` - Update compute resource +- `DELETE /{id}` - Delete compute resource +- `GET /search` - Search by keyword +- `POST /{id}/star` - Star/unstar resource +- `GET /{id}/star` - Check star status +- `GET 
/{id}/stars/count` - Get star count +- `GET /starred` - Get starred resources + +#### Storage Resources (`/api/v2/rf/storage-resources`) ๐Ÿ”’ +- `GET /` - List storage resources (with name search) +- `GET /{id}` - Get storage resource by ID +- `POST /` - Create storage resource +- `PUT /{id}` - Update storage resource +- `DELETE /{id}` - Delete storage resource +- `GET /search` - Search by keyword +- `GET /type/{storageType}` - Filter by storage type +- `POST /{id}/star` - Star/unstar resource +- `GET /{id}/star` - Check star status +- `GET /{id}/stars/count` - Get star count +- `GET /starred` - Get starred resources + +### Development APIs (`/api/dev`) +- `POST /auth/token` - Generate test JWT token +- `POST /auth/api-key-info` - Get API key information + +๐Ÿ”’ = Requires authentication (JWT or API Key) + +## ๐Ÿ“ Sample API Requests + +### Create Compute Resource +```json +POST /api/v2/rf/compute-resources/ +Headers: X-API-Key: dev-research-api-key-12345 + +{ + "name": "Titan Supercomputer", + "resourceDescription": "A powerful HPC cluster for scientific simulations", + "hostName": "titan.supercluster.edu", + "computeType": "HPC", + "cpuCores": 299008, + "memoryGB": 710000, + "operatingSystem": "Cray Linux Environment", + "hostAliases": ["titan-login1.supercluster.edu"], + "ipAddresses": ["128.219.10.1"], + "sshPort": 22, + "alternativeSSHHostName": "titan-login.supercluster.edu", + "securityProtocol": "SSH_KEYS", + "resourceJobManagerType": "SLURM", + "dataMovementProtocol": "SCP", + "queueSystem": "SLURM", + "resourceManager": "XSEDE", + "queues": [ + { + "queueName": "default", + "maxNodes": 100, + "maxProcessors": 2048, + "maxRunTime": 7200 + } + ], + "enabled": true +} ``` -### Using IntelliJ IDEA +### Create Storage Resource (S3) +```json +POST /api/v2/rf/storage-resources/ +Headers: X-API-Key: dev-research-api-key-12345 -1. Go to Run > Edit Configurations. 
+{ + "name": "S3 Research Storage", + "hostName": "s3.amazonaws.com", + "storageResourceDescription": "AWS S3 bucket for research data", + "storageType": "S3", + "capacityTB": 1000, + "accessProtocol": "S3", + "endpoint": "https://s3.amazonaws.com", + "supportsEncryption": true, + "supportsVersioning": true, + "bucketName": "my-research-bucket", + "accessKey": "AKIAIOSFODNN7EXAMPLE", + "secretKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "resourceManager": "AWS", + "enabled": true +} +``` + +## ๐Ÿงช Development Configuration + +### IntelliJ IDEA Setup +1. Go to **Run > Edit Configurations** 2. Select your Spring Boot run configuration -3. In the Program arguments field, add: +3. In **Program arguments**, add: `--spring.profiles.active=dev` + +### Environment Variables +```bash +# Override database configuration +export SPRING_DATASOURCE_URL=jdbc:mariadb://custom-host:3306/app_catalog +export SPRING_DATASOURCE_USERNAME=custom_user +export SPRING_DATASOURCE_PASSWORD=custom_password + +# Override authentication +export RESEARCH_AUTH_DEV_API_KEY=your-custom-api-key +``` + +### Profile-Specific Behavior +- **Default Profile**: Production-ready with minimal sample data +- **Dev Profile**: Includes comprehensive sample data and relaxed security + +## ๐Ÿ”ง Key Features + +### Enhanced Data Management +- **Field Preservation**: Complete round-trip data integrity for complex objects +- **JSON Serialization**: Advanced UI field embedding in database description columns +- **Validation**: Comprehensive Jakarta Bean Validation with detailed error messages + +### Search & Discovery +- **Multi-faceted Search**: Search by keyword, type, tags, author, language, framework +- **Pagination**: Configurable page size and sorting +- **Popularity Metrics**: Star counts and trending algorithms + +### Integration Ready +- **CORS Support**: Configurable for frontend integration +- **OpenAPI Documentation**: Auto-generated Swagger documentation +- **Health Monitoring**: Spring Boot 
Actuator endpoints +## ๐Ÿ› Troubleshooting + +### Common Issues + +**Database Connection Failed** ```bash ---spring.profiles.active=dev +# Verify database is running +docker ps | grep mariadb + +# Check hostname mapping +ping airavata.host + +# Test database connection +mysql -h airavata.host -P 13306 -u airavata -p123456 -e "SHOW DATABASES;" ``` + +**Column Length Errors** +```bash +# Apply the database migration +mysql -h airavata.host -P 13306 -u airavata -p123456 app_catalog < database-migrations/001-increase-description-column-lengths.sql +``` + +**Authentication Issues** +- Verify JWT token format and claims +- Check API key matches configuration +- Ensure proper headers are set + +### Logging +```bash +# Enable debug logging +export LOGGING_LEVEL_ORG_SPRINGFRAMEWORK_SECURITY=DEBUG +``` + +## ๐Ÿ“š Development Resources + +- **API Documentation**: http://localhost:8080/swagger-ui.html +- **Database Console**: http://localhost:8080/h2-console (H2 only) +- **Source Code**: `src/main/java/org/apache/airavata/research/service/` +- **Configuration**: `src/main/resources/application.yml` +- **Migrations**: `database-migrations/` + +## ๐Ÿค Contributing + +When making significant changes to the Research Service: + +1. **Update Documentation**: Keep this README current with new endpoints, configuration changes, and architectural updates +2. **Run Tests**: Ensure all existing functionality remains intact +3. **Database Migrations**: Create numbered migration scripts in `database-migrations/` for schema changes +4. **API Documentation**: Update Swagger annotations for new endpoints + +--- + +## ๐Ÿ“‹ UI vs Database Field Mapping + +### Compute Resources + +#### UI Fields (Frontend Forms) +- `name` - Resource display name +- `resourceDescription` - Resource description +- `hostName` - Primary hostname +- `computeType` - Type of compute resource (HPC, Cloud, etc.) 
+- `cpuCores` - Number of CPU cores +- `memoryGB` - Memory in gigabytes +- `operatingSystem` - Operating system +- `hostAliases` - Array of alternative hostnames +- `ipAddresses` - Array of IP addresses +- `sshPort` - SSH port number +- `alternativeSSHHostName` - Alternative SSH hostname +- `securityProtocol` - Security protocol (SSH_KEYS, etc.) +- `resourceJobManagerType` - Job manager type (SLURM, PBS, etc.) +- `dataMovementProtocol` - Data movement protocol (SCP, SFTP, etc.) +- `queueSystem` - Queue system type +- `resourceManager` - Resource manager name +- `queues` - Array of queue configurations with maxNodes, maxProcessors, maxRunTime +- `enabled` - Boolean status + +#### App Catalog Database Fields (airavata-api entities) +- `computeResourceId` - Primary key UUID +- `computeResourceDescription` - Resource description +- `hostName` - Primary hostname +- `hostAliases` - JSON array in description field +- `ipAddresses` - JSON array in description field +- `resourceJobManagerType` - From JobSubmissionInterface enum +- `gatewayUsageReporting` - Boolean flag +- `gatewayUsageModuleLoadCommand` - Command string +- `gatewayUsageExecutable` - Executable path +- `cpuCount` - CPU core count +- `nodeCount` - Node count +- `ppn` - Processes per node +- `maxRunTime` - Maximum runtime +- `memoryPerNode` - Memory per node +- `loginUserName` - Login username +- `scratchLocation` - Scratch directory +- `allocationProjectNumber` - Project allocation number +- `resourceSpecificCredentialStoreToken` - Credential token +- `usageReportingGatewayId` - Gateway ID for reporting +- `creationTime` - Timestamp +- `updateTime` - Timestamp + +### Storage Resources + +#### UI Fields (Frontend Forms) +- `name` - Storage resource name +- `hostName` - Storage hostname +- `storageResourceDescription` - Description +- `storageType` - Type (S3, SFTP, etc.) 
+- `capacityTB` - Storage capacity in TB +- `accessProtocol` - Access protocol +- `endpoint` - Storage endpoint URL +- `supportsEncryption` - Encryption support boolean +- `supportsVersioning` - Versioning support boolean +- `bucketName` - S3 bucket name (S3 specific) +- `accessKey` - Access key (S3 specific) +- `secretKey` - Secret key (S3 specific) +- `resourceManager` - Resource manager name +- `enabled` - Boolean status + +#### App Catalog Database Fields (airavata-api entities) +- `storageResourceId` - Primary key UUID +- `hostName` - Storage hostname +- `storageResourceDescription` - Description +- `enabled` - Boolean status +- `creationTime` - Timestamp +- `updateTime` - Timestamp +- DataMovementInterface relations for protocol-specific configurations +- Storage-specific fields stored in related entities and JSON serialization + +### Field Mapping Strategy + +**Current Implementation (Post-August 2025):** +- **Direct Field Mapping**: UI fields map directly to DTO fields which map to entity fields +- **No JSON Injection**: Complex UI fields (arrays, objects) are handled through proper entity relationships +- **Entity Relationships**: Uses airavata-api's JobSubmissionInterface, DataMovementInterface, BatchQueue entities +- **Description Field**: Used only for actual descriptions, no JSON serialization + +**Previous Implementation (Pre-August 2025):** +- **JSON-in-Description**: UI-specific fields were serialized as JSON in the description column +- **Field Mismatch**: UI used deprecated field names that didn't match backend entities +- **Workaround**: DTOConverter extracted/encoded JSON from description fields + +--- + +**Apache Airavata Research Service** - Empowering scientific discovery through unified research resource management. 
\ No newline at end of file diff --git a/modules/research-framework/research-service/database-migrations/001-increase-description-column-lengths.sql b/modules/research-framework/research-service/database-migrations/001-increase-description-column-lengths.sql new file mode 100644 index 0000000000..42cf382922 --- /dev/null +++ b/modules/research-framework/research-service/database-migrations/001-increase-description-column-lengths.sql @@ -0,0 +1,25 @@ +-- Migration: Increase Description Column Lengths +-- Date: 2025-01-02 +-- Issue: "Data too long for column" error when serializing UI fields to JSON +-- +-- This migration increases the column lengths for RESOURCE_DESCRIPTION and DESCRIPTION +-- columns to accommodate enhanced JSON serialization of UI fields including: +-- - name, hostAliases, ipAddresses, queues (compute resources) +-- - name and additional UI fields (storage resources) + +-- Increase RESOURCE_DESCRIPTION column length in COMPUTE_RESOURCE table +ALTER TABLE COMPUTE_RESOURCE MODIFY COLUMN RESOURCE_DESCRIPTION VARCHAR(2048); + +-- Increase DESCRIPTION column length in STORAGE_RESOURCE table +ALTER TABLE STORAGE_RESOURCE MODIFY COLUMN DESCRIPTION VARCHAR(2048); + +-- Verification query (optional - run to confirm changes) +-- SELECT +-- TABLE_NAME, +-- COLUMN_NAME, +-- DATA_TYPE, +-- CHARACTER_MAXIMUM_LENGTH +-- FROM INFORMATION_SCHEMA.COLUMNS +-- WHERE TABLE_SCHEMA = 'app_catalog' +-- AND TABLE_NAME IN ('COMPUTE_RESOURCE', 'STORAGE_RESOURCE') +-- AND COLUMN_NAME IN ('RESOURCE_DESCRIPTION', 'DESCRIPTION'); \ No newline at end of file diff --git a/modules/research-framework/research-service/pom.xml b/modules/research-framework/research-service/pom.xml index 79deaf1ee8..0b033c3dac 100644 --- a/modules/research-framework/research-service/pom.xml +++ b/modules/research-framework/research-service/pom.xml @@ -50,6 +50,11 @@ under the License. 
+ + com.h2database + h2 + runtime + org.springframework.boot spring-boot-starter-web @@ -173,6 +178,38 @@ under the License. + + + + org.springframework.boot + spring-boot-starter-oauth2-resource-server + + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + + + + + org.springframework.security + spring-security-oauth2-jose + + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + @@ -229,6 +266,7 @@ under the License. maven-compiler-plugin 17 + true diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/ResearchServiceApplication.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/ResearchServiceApplication.java index c24f8e75c6..adf3686948 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/ResearchServiceApplication.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/ResearchServiceApplication.java @@ -22,10 +22,8 @@ import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.data.jpa.repository.config.EnableJpaAuditing; -import org.springframework.data.jpa.repository.config.EnableJpaRepositories; @SpringBootApplication -@EnableJpaRepositories() @EnableJpaAuditing public class ResearchServiceApplication { public static void main(String[] args) { diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationFilter.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationFilter.java new file mode 100644 index 0000000000..399128dc0d --- /dev/null +++ 
b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationFilter.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.airavata.research.service.config; + +import jakarta.servlet.FilterChain; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.stereotype.Component; +import org.springframework.web.filter.OncePerRequestFilter; + +import java.io.IOException; +import java.util.Arrays; + +@Component +public class ApiKeyAuthenticationFilter extends OncePerRequestFilter { + + private final String devApiKey; + private static final String API_KEY_HEADER = "X-API-Key"; + + public ApiKeyAuthenticationFilter(@Value("${research.auth.dev-api-key}") String devApiKey) { + this.devApiKey = devApiKey; + } + + @Override + protected void doFilterInternal(HttpServletRequest request, + HttpServletResponse 
response, + FilterChain filterChain) throws ServletException, IOException { + + String apiKey = request.getHeader(API_KEY_HEADER); + + if (apiKey != null && devApiKey.equals(apiKey)) { + // Create API key authentication + ApiKeyAuthenticationToken authentication = new ApiKeyAuthenticationToken( + "api-key-user", + null, + Arrays.asList(new SimpleGrantedAuthority("ROLE_API_USER")) + ); + + SecurityContextHolder.getContext().setAuthentication(authentication); + } + + filterChain.doFilter(request, response); + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationToken.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationToken.java new file mode 100644 index 0000000000..b2d5fe08a0 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ApiKeyAuthenticationToken.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.airavata.research.service.config; + +import org.springframework.security.authentication.AbstractAuthenticationToken; +import org.springframework.security.core.GrantedAuthority; + +import java.util.Collection; + +public class ApiKeyAuthenticationToken extends AbstractAuthenticationToken { + private final Object principal; + private Object credentials; + + public ApiKeyAuthenticationToken(Object principal, Object credentials, Collection authorities) { + super(authorities); + this.principal = principal; + this.credentials = credentials; + setAuthenticated(true); + } + + @Override + public Object getCredentials() { + return credentials; + } + + @Override + public Object getPrincipal() { + return principal; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AppCatalogDatabaseConfig.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AppCatalogDatabaseConfig.java new file mode 100644 index 0000000000..c4637c4fe2 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AppCatalogDatabaseConfig.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.airavata.research.service.config; + +import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; +import java.util.Properties; + +@Configuration +@EnableJpaRepositories( + basePackages = "org.apache.airavata.research.service.repository", + entityManagerFactoryRef = "appCatalogEntityManagerFactory", + transactionManagerRef = "appCatalogTransactionManager" +) +public class AppCatalogDatabaseConfig { + + @Bean + @ConfigurationProperties("app.datasource.app-catalog") + public DataSourceProperties appCatalogDataSourceProperties() { + return new DataSourceProperties(); + } + + @Bean + public DataSource appCatalogDataSource() { + return appCatalogDataSourceProperties() + .initializeDataSourceBuilder() + .build(); + } + + @Bean + public LocalContainerEntityManagerFactoryBean appCatalogEntityManagerFactory() { + LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean(); + em.setDataSource(appCatalogDataSource()); + em.setPersistenceUnitName("appCatalogPU"); + + // Scan airavata-api entities instead of local replicated entities + em.setPackagesToScan("org.apache.airavata.registry.core.entities.appcatalog"); + + HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter(); + vendorAdapter.setGenerateDdl(false); // Don't modify 
existing schema + vendorAdapter.setShowSql(false); + em.setJpaVendorAdapter(vendorAdapter); + + Properties props = new Properties(); + props.setProperty("hibernate.hbm2ddl.auto", "none"); // Don't modify existing schema + props.setProperty("hibernate.dialect", "org.hibernate.dialect.MariaDBDialect"); + props.setProperty("hibernate.format_sql", "true"); + props.setProperty("hibernate.show_sql", "false"); + + // Disable validation to avoid issues with existing data + props.setProperty("hibernate.validator.apply_to_ddl", "false"); + props.setProperty("hibernate.check_nullability", "false"); + props.setProperty("jakarta.persistence.validation.mode", "NONE"); + + em.setJpaProperties(props); + + return em; + } + + @Bean + public PlatformTransactionManager appCatalogTransactionManager() { + JpaTransactionManager transactionManager = new JpaTransactionManager(); + transactionManager.setEntityManagerFactory(appCatalogEntityManagerFactory().getObject()); + return transactionManager; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AuthzTokenFilter.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AuthzTokenFilter.java index fa55625547..cf29195c95 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AuthzTokenFilter.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/AuthzTokenFilter.java @@ -26,6 +26,9 @@ import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; import java.util.Map; import org.apache.airavata.model.security.AuthzToken; import org.apache.airavata.model.user.UserProfile; @@ -84,19 +87,38 @@ protected void 
doFilterInternal(HttpServletRequest request, HttpServletResponse return; } - if (authorizationHeader != null && authorizationHeader.startsWith("Bearer ") && xClaimsHeader != null) { + if (authorizationHeader != null && authorizationHeader.startsWith("Bearer ")) { try { String accessToken = authorizationHeader.substring(7); // Remove "Bearer " prefix - ObjectMapper objectMapper = new ObjectMapper(); - Map claimsMap = objectMapper.readValue(xClaimsHeader, new TypeReference<>() {}); + Map claimsMap; + + // Primary: Use X-Claims header if available (frontend compatibility) + if (xClaimsHeader != null) { + ObjectMapper objectMapper = new ObjectMapper(); + claimsMap = objectMapper.readValue(xClaimsHeader, new TypeReference<>() {}); + LOGGER.debug("Using claims from X-Claims header"); + } else { + // Fallback: Extract claims from JWT payload for pure OAuth2/OIDC clients + claimsMap = extractClaimsFromJWT(accessToken); + LOGGER.debug("Using claims extracted from JWT payload"); + } AuthzToken authzToken = new AuthzToken(); authzToken.setAccessToken(accessToken); authzToken.setClaimsMap(claimsMap); UserContext.setAuthzToken(authzToken); - UserProfile userProfile = airavataService.getUserProfile( - authzToken, getClaim(authzToken, USERNAME_CLAIM), getClaim(authzToken, GATEWAY_CLAIM)); + // Create UserProfile from JWT claims directly (no external UserProfileService needed) + UserProfile userProfile = new UserProfile(); + userProfile.setUserId(getClaim(authzToken, USERNAME_CLAIM)); + userProfile.setGatewayId(getClaim(authzToken, GATEWAY_CLAIM)); + + // Set email from JWT if available + String email = getOptionalClaim(authzToken, "email"); + if (email != null) { + userProfile.setEmails(Arrays.asList(email)); + } + UserContext.setUser(userProfile); } catch (Exception e) { LOGGER.error("Invalid authorization data", e); @@ -111,6 +133,60 @@ protected void doFilterInternal(HttpServletRequest request, HttpServletResponse } } + /** + * Extract claims from JWT payload as fallback when 
X-Claims header is not present + * This enables pure OAuth2/OIDC clients to work without custom headers + */ + private Map extractClaimsFromJWT(String jwt) { + try { + // Split JWT into parts (header.payload.signature) + String[] parts = jwt.split("\\."); + if (parts.length != 3) { + throw new IllegalArgumentException("Invalid JWT format"); + } + + // Decode the payload (second part) + String payload = new String(Base64.getUrlDecoder().decode(parts[1])); + + // Parse JSON payload + ObjectMapper objectMapper = new ObjectMapper(); + Map jwtClaims = objectMapper.readValue(payload, new TypeReference<>() {}); + + // Convert to string map and extract required claims + Map claimsMap = new HashMap<>(); + + // Map standard OIDC claims to Airavata claims + String email = getClaimValue(jwtClaims, "email", "preferred_username", "sub"); + if (email != null) { + claimsMap.put(USERNAME_CLAIM, email); + } + + // Default gateway for JWT-only clients + claimsMap.put(GATEWAY_CLAIM, "default"); + + LOGGER.debug("Extracted claims from JWT: userName={}, gatewayID={}", + claimsMap.get(USERNAME_CLAIM), claimsMap.get(GATEWAY_CLAIM)); + + return claimsMap; + } catch (Exception e) { + LOGGER.error("Failed to extract claims from JWT", e); + throw new IllegalArgumentException("Invalid JWT token", e); + } + } + + /** + * Get claim value from JWT payload, trying multiple possible claim names + */ + private String getClaimValue(Map jwtClaims, String... 
claimNames) { + for (String claimName : claimNames) { + Object value = jwtClaims.get(claimName); + if (value != null) { + return value.toString(); + } + } + return null; + } + private static String getClaim(AuthzToken authzToken, String claimId) { return authzToken.getClaimsMap().entrySet().stream() .filter(entry -> entry.getKey().equalsIgnoreCase(claimId)) @@ -119,4 +195,12 @@ private static String getClaim(AuthzToken authzToken, String claimId) { .orElseThrow(() -> new IllegalArgumentException("Missing '" + claimId + "' claim in the authentication token")); } + + private static String getOptionalClaim(AuthzToken authzToken, String claimId) { + return authzToken.getClaimsMap().entrySet().stream() + .filter(entry -> entry.getKey().equalsIgnoreCase(claimId)) + .map(Map.Entry::getValue) + .findFirst() + .orElse(null); + } } diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomAuthenticationEntryPoint.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomAuthenticationEntryPoint.java new file mode 100644 index 0000000000..addf98b442 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomAuthenticationEntryPoint.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.airavata.research.service.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import org.springframework.http.MediaType; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.web.AuthenticationEntryPoint; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.time.Instant; +import java.util.HashMap; +import java.util.Map; + +@Component +public class CustomAuthenticationEntryPoint implements AuthenticationEntryPoint { + + private final ObjectMapper objectMapper = new ObjectMapper(); + + @Override + public void commence(HttpServletRequest request, HttpServletResponse response, + AuthenticationException authException) throws IOException, ServletException { + + response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + response.setContentType(MediaType.APPLICATION_JSON_VALUE); + + Map errorResponse = new HashMap<>(); + errorResponse.put("timestamp", Instant.now().toString()); + errorResponse.put("status", 401); + errorResponse.put("error", "Unauthorized"); + errorResponse.put("message", "Authentication required. 
Please provide a valid JWT token or X-API-Key header."); + errorResponse.put("path", request.getRequestURI()); + + String authHeader = request.getHeader("Authorization"); + String apiKeyHeader = request.getHeader("X-API-Key"); + + if (authHeader == null && apiKeyHeader == null) { + errorResponse.put("details", "Missing authentication. Include either 'Authorization: Bearer ' or 'X-API-Key: ' header."); + } else if (authHeader != null && !authHeader.startsWith("Bearer ")) { + errorResponse.put("details", "Invalid Authorization header format. Use 'Authorization: Bearer '."); + } else if (apiKeyHeader != null) { + errorResponse.put("details", "Invalid API key provided."); + } else { + errorResponse.put("details", "Invalid or expired authentication token."); + } + + response.getWriter().write(objectMapper.writeValueAsString(errorResponse)); + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomEntityManagerFactoryBean.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomEntityManagerFactoryBean.java new file mode 100644 index 0000000000..df4048c39e --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/CustomEntityManagerFactoryBean.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.airavata.research.service.config; + +import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourceEntity; +import org.apache.airavata.registry.core.entities.appcatalog.StorageResourceEntity; +import org.hibernate.boot.Metadata; +import org.hibernate.boot.MetadataSources; +import org.hibernate.boot.registry.StandardServiceRegistry; +import org.hibernate.boot.registry.StandardServiceRegistryBuilder; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; + +import javax.sql.DataSource; +import jakarta.persistence.EntityManagerFactory; +import java.util.Properties; + +/** + * Custom EntityManagerFactory that programmatically registers ONLY the specific + * entities we need, completely bypassing package scanning to avoid ParserInputEntity + * and other problematic entities. 
+ */ +public class CustomEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean { + + private final DataSource dataSource; + private final Properties hibernateProperties; + + public CustomEntityManagerFactoryBean(DataSource dataSource, Properties hibernateProperties) { + this.dataSource = dataSource; + this.hibernateProperties = hibernateProperties; + setDataSource(dataSource); + setPersistenceUnitName("appCatalogPU"); + // DO NOT set packages to scan - we will register entities manually + } + + @Override + protected EntityManagerFactory createNativeEntityManagerFactory() { + // Create Hibernate service registry with our properties + StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder(); + + // Add datasource configuration + hibernateProperties.put("hibernate.connection.datasource", dataSource); + registryBuilder.applySettings(hibernateProperties); + + StandardServiceRegistry registry = registryBuilder.build(); + + try { + // Create metadata sources and add ONLY our specific entities + MetadataSources metadataSources = new MetadataSources(registry); + + // CRITICAL: Add ONLY the entities we need - no package scanning + metadataSources.addAnnotatedClass(ComputeResourceEntity.class); + metadataSources.addAnnotatedClass(StorageResourceEntity.class); + + // Build metadata and create session factory + Metadata metadata = metadataSources.buildMetadata(); + + // Return the EntityManagerFactory from the session factory + return metadata.buildSessionFactory().unwrap(EntityManagerFactory.class); + + } catch (Exception e) { + StandardServiceRegistryBuilder.destroy(registry); + throw new RuntimeException("Failed to create EntityManagerFactory", e); + } + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DatasetInitializer.java 
b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DatasetInitializer.java new file mode 100644 index 0000000000..f120e8e17e --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DatasetInitializer.java @@ -0,0 +1,211 @@ +/** +* +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, +* software distributed under the License is distributed on an +* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +* KIND, either express or implied. See the License for the +* specific language governing permissions and limitations +* under the License. 
+*/ +package org.apache.airavata.research.service.config; + +import jakarta.annotation.PostConstruct; +import java.util.HashSet; +import java.util.Set; +import org.apache.airavata.research.service.enums.PrivacyEnum; +import org.apache.airavata.research.service.enums.StateEnum; +import org.apache.airavata.research.service.enums.StatusEnum; +import org.apache.airavata.research.service.model.entity.DatasetResource; +import org.apache.airavata.research.service.model.entity.Tag; +import org.apache.airavata.research.service.model.repo.ResourceRepository; +import org.apache.airavata.research.service.model.repo.TagRepository; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +/** + * Dataset Initializer for creating sample dataset resources + * Runs automatically without requiring dev-local profile + */ +@Component +public class DatasetInitializer { + + private static final Logger LOGGER = LoggerFactory.getLogger(DatasetInitializer.class); + + private final ResourceRepository resourceRepository; + private final TagRepository tagRepository; + + public DatasetInitializer(ResourceRepository resourceRepository, TagRepository tagRepository) { + this.resourceRepository = resourceRepository; + this.tagRepository = tagRepository; + } + + @PostConstruct + public void initializeDatasets() { + LOGGER.info("Initializing dataset resources..."); + + try { + // Only initialize if no datasets exist + long datasetCount = resourceRepository.findAll().stream() + .filter(resource -> resource instanceof DatasetResource) + .count(); + + if (datasetCount == 0) { + LOGGER.info("Creating sample dataset resources..."); + createSampleDatasets(); + LOGGER.info("Dataset initialization completed."); + } else { + LOGGER.info("Datasets already exist. 
Skipping initialization."); + } + } catch (Exception e) { + LOGGER.error("Error during dataset initialization: {}", e.getMessage(), e); + throw new RuntimeException("Failed to initialize dataset resources", e); + } + } + + private void createSampleDatasets() { + // Create diverse sample datasets + DatasetData[] datasetArray = { + new DatasetData( + "Lung CT Scans Database", + "A comprehensive collection of 3D lung CT images for medical imaging research and machine learning model training.", + "lung-ct-scans-db", + Set.of("medical@imaging.lab", "ssaggi3@gatech.edu"), + Set.of("medical", "ct-scans", "lungs", "imaging") + ), + + new DatasetData( + "Financial Fraud Dataset", + "Large-scale dataset of credit card transactions with fraud labels for developing fraud detection systems.", + "financial-fraud-dataset", + Set.of("fraud@detection.org", "security@fintech.com"), + Set.of("finance", "fraud", "transactions", "cybersecurity") + ), + + new DatasetData( + "Stock Market Data", + "Historical stock prices and trading volumes for major market indices over the past 20 years.", + "stock-market-historical-data", + Set.of("market@data.finance", "trading@analytics.com"), + Set.of("finance", "stocks", "time-series", "trading") + ), + + new DatasetData( + "Drug Compound Library", + "Chemical compound structures and properties for pharmaceutical research and drug discovery.", + "drug-compound-library", + Set.of("pharma@research.edu", "compounds@drugdev.org"), + Set.of("healthcare", "compounds", "pharmaceuticals", "chemistry") + ), + + new DatasetData( + "Social Media Sentiment", + "Annotated social media posts with sentiment labels for natural language processing and sentiment analysis.", + "social-media-sentiment-dataset", + Set.of("nlp@sentiment.lab", "text@analysis.ai"), + Set.of("nlp", "sentiment", "social-media", "text") + ), + + new DatasetData( + "Protein Sequences", + "Large collection of protein sequences with structural annotations for bioinformatics research.", + 
"protein-sequences-annotated", + Set.of("bio@sequences.org", "proteins@research.edu"), + Set.of("life-sciences", "proteins", "sequences", "bioinformatics") + ), + + new DatasetData( + "ImageNet Subset", + "Curated subset of ImageNet for computer vision benchmarking and deep learning model evaluation.", + "imagenet-curated-subset", + Set.of("vision@datasets.org", "images@cv.lab"), + Set.of("computer-vision", "images", "classification", "benchmark") + ), + + new DatasetData( + "Malware Samples", + "Classified malware samples for cybersecurity research and threat detection system training.", + "malware-samples-classified", + Set.of("security@malware.lab", "threats@cyber.defense"), + Set.of("cybersecurity", "malware", "security", "threats") + ), + + new DatasetData( + "Climate Data Collection", + "Long-term climate measurements including temperature, precipitation, and atmospheric data.", + "climate-data-collection", + Set.of("climate@research.org", "weather@prediction.lab"), + Set.of("climate", "environmental", "weather", "data-analysis") + ) + }; + + // Create datasets from sample data + for (DatasetData datasetData : datasetArray) { + DatasetResource dataset = createDatasetFromData(datasetData); + resourceRepository.save(dataset); + } + + LOGGER.info("Created {} dataset resources", datasetArray.length); + } + + private DatasetResource createDatasetFromData(DatasetData data) { + DatasetResource dataset = new DatasetResource(); + dataset.setName(data.name); + dataset.setDescription(data.description); + dataset.setDatasetUrl(data.datasetUrl); + + // Set default Resource fields (inherited) + dataset.setPrivacy(PrivacyEnum.PUBLIC); + dataset.setState(StateEnum.ACTIVE); + dataset.setStatus(StatusEnum.VERIFIED); + dataset.setAuthors(new HashSet<>(data.authors)); + dataset.setTags(getOrCreateTags(data.tags)); + dataset.setHeaderImage(""); // Default empty header image + + return dataset; + } + + private Set getOrCreateTags(Set tagNames) { + Set tags = new HashSet<>(); + for 
(String tagName : tagNames) { + Tag existingTag = tagRepository.findByValue(tagName); + if (existingTag != null) { + tags.add(existingTag); + } else { + Tag newTag = new Tag(); + newTag.setValue(tagName); + Tag savedTag = tagRepository.save(newTag); + tags.add(savedTag); + } + } + return tags; + } + + // Helper class for organizing sample data + private static class DatasetData { + final String name; + final String description; + final String datasetUrl; + final Set authors; + final Set tags; + + public DatasetData(String name, String description, String datasetUrl, + Set authors, Set tags) { + this.name = name; + this.description = description; + this.datasetUrl = datasetUrl; + this.authors = authors; + this.tags = tags; + } + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DevDataInitializer.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DevDataInitializer.java index ceb321fbbf..c864edbe68 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DevDataInitializer.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/DevDataInitializer.java @@ -22,8 +22,11 @@ import java.util.HashSet; import java.util.Set; import org.apache.airavata.research.service.enums.PrivacyEnum; +import org.apache.airavata.research.service.enums.StateEnum; import org.apache.airavata.research.service.enums.StatusEnum; import org.apache.airavata.research.service.model.entity.DatasetResource; +import org.apache.airavata.research.service.model.entity.ModelResource; +import org.apache.airavata.research.service.model.entity.NotebookResource; import org.apache.airavata.research.service.model.entity.Project; import org.apache.airavata.research.service.model.entity.RepositoryResource; import 
org.apache.airavata.research.service.model.entity.Tag; @@ -80,6 +83,7 @@ private void createProject( repo.setHeaderImage("header_image.png"); repo.setRepositoryUrl(repoUrl); repo.setStatus(StatusEnum.VERIFIED); + repo.setState(StateEnum.ACTIVE); repo.setPrivacy(PrivacyEnum.PUBLIC); repo.setTags(tagSet); repo.setAuthors(authors); @@ -91,16 +95,43 @@ private void createProject( dataset.setHeaderImage("header_image.png"); dataset.setDatasetUrl(datasetUrl); dataset.setStatus(StatusEnum.VERIFIED); + dataset.setState(StateEnum.ACTIVE); dataset.setPrivacy(PrivacyEnum.PUBLIC); dataset.setTags(tagSet); dataset.setAuthors(authors); dataset = resourceRepository.save(dataset); + ModelResource model = new ModelResource(); + model.setName(name + " - ML Model"); + model.setDescription("Machine learning model for " + description); + model.setHeaderImage("header_image.png"); + model.setApplicationInterfaceId("app-" + datasetUrl); + model.setVersion("1.0"); + model.setStatus(StatusEnum.VERIFIED); + model.setState(StateEnum.ACTIVE); + model.setPrivacy(PrivacyEnum.PUBLIC); + model.setTags(tagSet); + model.setAuthors(authors); + model = resourceRepository.save(model); + + NotebookResource notebook = new NotebookResource(); + notebook.setName(name + " - Analysis Notebook"); + notebook.setDescription("Jupyter notebook for " + description); + notebook.setHeaderImage("header_image.png"); + notebook.setNotebookPath(datasetUrl + ".ipynb"); + notebook.setStatus(StatusEnum.VERIFIED); + notebook.setState(StateEnum.ACTIVE); + notebook.setPrivacy(PrivacyEnum.PUBLIC); + notebook.setTags(tagSet); + notebook.setAuthors(authors); + notebook = resourceRepository.save(notebook); + Project project = new Project(); project.setRepositoryResource(repo); project.getDatasetResources().add(dataset); project.setName(name); project.setOwnerId(user); + project.setState(StateEnum.ACTIVE); projectRepository.save(project); System.out.println("Initialized Project with id: " + project.getId()); diff --git 
a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ProjectDatabaseConfig.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ProjectDatabaseConfig.java new file mode 100644 index 0000000000..78c5960755 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/ProjectDatabaseConfig.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.airavata.research.service.config; + +import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; +import java.util.Properties; + +@Configuration +@EnableJpaRepositories( + basePackages = {"org.apache.airavata.research.service.model.repo", "org.apache.airavata.research.service.v2.repository"}, + entityManagerFactoryRef = "entityManagerFactory", + transactionManagerRef = "transactionManager" +) +public class ProjectDatabaseConfig { + + @Bean + @ConfigurationProperties("spring.datasource") + public DataSourceProperties projectDataSourceProperties() { + return new DataSourceProperties(); + } + + @Bean + public DataSource dataSource() { + return projectDataSourceProperties() + .initializeDataSourceBuilder() + .build(); + } + + @Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory() { + LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean(); + em.setDataSource(dataSource()); + em.setPersistenceUnitName("projectPU"); + em.setPackagesToScan("org.apache.airavata.research.service.model.entity", "org.apache.airavata.research.service.v2.entity"); + + HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter(); + vendorAdapter.setGenerateDdl(true); + vendorAdapter.setShowSql(true); + em.setJpaVendorAdapter(vendorAdapter); + + Properties jpaProperties = new Properties(); + 
jpaProperties.put("hibernate.hbm2ddl.auto", "update"); + jpaProperties.put("hibernate.dialect", "org.hibernate.dialect.H2Dialect"); + jpaProperties.put("hibernate.show_sql", true); + jpaProperties.put("hibernate.format_sql", true); + + em.setJpaProperties(jpaProperties); + + return em; + } + + @Bean + public PlatformTransactionManager transactionManager() { + JpaTransactionManager transactionManager = new JpaTransactionManager(); + transactionManager.setEntityManagerFactory(entityManagerFactory().getObject()); + return transactionManager; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/RegistryServiceConfig.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/RegistryServiceConfig.java new file mode 100644 index 0000000000..58befaf029 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/RegistryServiceConfig.java @@ -0,0 +1,143 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.airavata.research.service.config; + +import org.apache.airavata.common.exception.ApplicationSettingsException; +import org.apache.airavata.common.utils.ServerSettings; +import org.apache.airavata.registry.api.RegistryService; +import org.apache.airavata.registry.api.client.RegistryServiceClientFactory; +import org.apache.airavata.registry.api.exception.RegistryServiceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Configuration for Airavata Registry Service integration + * Provides RegistryService client bean for accessing existing airavata-api infrastructure + * Uses lazy initialization to allow application startup even when registry service is unavailable + */ +@Configuration +public class RegistryServiceConfig { + + private static final Logger LOGGER = LoggerFactory.getLogger(RegistryServiceConfig.class); + + @Value("${airavata.registry.host:localhost}") + private String registryHost; + + @Value("${airavata.registry.port:9930}") + private int registryPort; + + @Value("${airavata.registry.enabled:true}") + private boolean registryEnabled; + + /** + * Creates RegistryService.Iface client bean using Airavata's RegistryServiceClientFactory + * This integrates with existing airavata-api infrastructure + * Uses lazy initialization to allow application startup without registry service + */ + @Bean + public RegistryServiceProvider registryServiceProvider() { + return new RegistryServiceProvider(); + } + + /** + * Provider class that handles lazy initialization and graceful failure of RegistryService + */ + public class RegistryServiceProvider { + private volatile RegistryService.Iface registryService; + private volatile boolean connectionAttempted = false; + private volatile Exception lastException; + + public RegistryService.Iface 
getRegistryService() throws RegistryServiceException { + if (!registryEnabled) { + throw new RegistryServiceException("Registry service is disabled. Enable with airavata.registry.enabled=true"); + } + + if (registryService == null && !connectionAttempted) { + synchronized (this) { + if (registryService == null && !connectionAttempted) { + connectionAttempted = true; + try { + registryService = createRegistryService(); + LOGGER.info("Successfully connected to Airavata Registry Service"); + } catch (Exception e) { + lastException = e; + LOGGER.error("Failed to connect to Airavata Registry Service at {}:{} - {}", + registryHost, registryPort, e.getMessage()); + } + } + } + } + + if (registryService == null) { + String errorMsg = String.format("Registry service unavailable at %s:%d", registryHost, registryPort); + if (lastException != null) { + errorMsg += " - " + lastException.getMessage(); + } + throw new RegistryServiceException(errorMsg); + } + + return registryService; + } + + private RegistryService.Iface createRegistryService() throws RegistryServiceException { + String serverHost = getRegistryServerHost(); + int serverPort = getRegistryServerPort(); + + LOGGER.info("Attempting to connect to Airavata Registry Service at {}:{}", serverHost, serverPort); + + RegistryService.Client registryClient = RegistryServiceClientFactory.createRegistryClient(serverHost, serverPort); + return registryClient; // RegistryService.Client implements RegistryService.Iface + } + + public boolean isAvailable() { + try { + return getRegistryService() != null; + } catch (RegistryServiceException e) { + return false; + } + } + } + + /** + * Get registry server host from Airavata ServerSettings or fallback to application properties + */ + private String getRegistryServerHost() { + try { + return ServerSettings.getRegistryServerHost(); + } catch (ApplicationSettingsException e) { + LOGGER.warn("Unable to get registry host from ServerSettings, using configured value: {}", registryHost); + 
return registryHost; + } + } + + /** + * Get registry server port from Airavata ServerSettings or fallback to application properties + */ + private int getRegistryServerPort() { + try { + return Integer.parseInt(ServerSettings.getRegistryServerPort()); + } catch (ApplicationSettingsException | NumberFormatException e) { + LOGGER.warn("Unable to get registry port from ServerSettings, using configured value: {}", registryPort); + return registryPort; + } + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/SecurityConfig.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/SecurityConfig.java new file mode 100644 index 0000000000..c149f6a96c --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/SecurityConfig.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.airavata.research.service.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.jwt.JwtDecoder; +import org.springframework.security.oauth2.jwt.NimbusJwtDecoder; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter; +import org.springframework.security.oauth2.server.resource.web.BearerTokenAuthenticationFilter; +import org.springframework.security.web.SecurityFilterChain; +import org.springframework.web.cors.CorsConfiguration; +import org.springframework.web.cors.CorsConfigurationSource; +import org.springframework.web.cors.UrlBasedCorsConfigurationSource; + +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +@Configuration +@EnableWebSecurity +@EnableMethodSecurity +public class SecurityConfig { + + @Value("${research.auth.jwks-uri:https://auth.cybershuttle.org/realms/default/protocol/openid-connect/certs}") + private String jwksUri; + + @Value("${research.auth.issuer-uri:https://auth.cybershuttle.org/realms/default}") + private String issuerUri; + + @Value("${research.auth.dev-api-key:dev-research-api-key-12345}") + private String devApiKey; + + @Autowired + private CustomAuthenticationEntryPoint authenticationEntryPoint; + + @Bean + public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { + http + .csrf(csrf -> csrf.disable()) + .cors(cors -> 
cors.configurationSource(corsConfigurationSource())) + .authorizeHttpRequests(authz -> authz + // Public endpoints (only v1 has public endpoints now) + .requestMatchers("/api/v1/rf/*/public/**").permitAll() + .requestMatchers("/api/dev/**").permitAll() // Dev endpoints + .requestMatchers("/actuator/health").permitAll() + + // Protected endpoints (all v2 endpoints require authentication) + .requestMatchers("/api/v1/rf/**", "/api/v2/rf/**").authenticated() + .anyRequest().authenticated() + ) + .oauth2ResourceServer(oauth2 -> oauth2 + .jwt(jwt -> jwt + .decoder(jwtDecoder()) + .jwtAuthenticationConverter(jwtAuthenticationConverter()) + ) + .authenticationEntryPoint(authenticationEntryPoint) + ) + .addFilterBefore(apiKeyFilter(), BearerTokenAuthenticationFilter.class); + + return http.build(); + } + + @Bean + public JwtDecoder jwtDecoder() { + return NimbusJwtDecoder.withJwkSetUri(jwksUri) + .jwsAlgorithm(org.springframework.security.oauth2.jose.jws.SignatureAlgorithm.RS256) + .build(); + } + + @Bean + public JwtAuthenticationConverter jwtAuthenticationConverter() { + JwtAuthenticationConverter converter = new JwtAuthenticationConverter(); + converter.setJwtGrantedAuthoritiesConverter(jwt -> { + // Extract roles from JWT claims + Collection roles = jwt.getClaimAsStringList("roles"); + if (roles == null) { + roles = Arrays.asList("USER"); // Default role + } + return roles.stream() + .map(role -> new SimpleGrantedAuthority("ROLE_" + role.toUpperCase())) + .collect(Collectors.toList()); + }); + return converter; + } + + @Bean + public ApiKeyAuthenticationFilter apiKeyFilter() { + return new ApiKeyAuthenticationFilter(devApiKey); + } + + @Bean + public CorsConfigurationSource corsConfigurationSource() { + CorsConfiguration configuration = new CorsConfiguration(); + configuration.setAllowedOriginPatterns(Arrays.asList("*")); + configuration.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS")); + configuration.setAllowedHeaders(Arrays.asList("*")); + 
configuration.setAllowCredentials(true); + + UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); + source.registerCorsConfiguration("/**", configuration); + return source; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/WebMvcConfig.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/WebMvcConfig.java index 06b393c56a..9959b341c8 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/WebMvcConfig.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/config/WebMvcConfig.java @@ -36,8 +36,9 @@ public class WebMvcConfig implements WebMvcConfigurer { @Override public void addCorsMappings(CorsRegistry registry) { registry.addMapping("/**") - .allowedOrigins(deployedOrigin, devOrigin) + .allowedOrigins(deployedOrigin, devOrigin, "http://localhost:5173", "http://localhost:3000") .allowedMethods("GET", "POST", "OPTIONS", "PATCH", "DELETE", "PUT") - .allowedHeaders("*"); + .allowedHeaders("*") + .allowCredentials(true); } } diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/controller/DevAuthController.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/controller/DevAuthController.java new file mode 100644 index 0000000000..4e446c871c --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/controller/DevAuthController.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.airavata.research.service.controller; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +@RestController +@RequestMapping("/api/dev") +@Profile("dev") +@Tag(name = "Development Authentication", description = "Development-only endpoints for testing authentication") +public class DevAuthController { + + private static final Logger LOGGER = LoggerFactory.getLogger(DevAuthController.class); + + @Operation(summary = "Generate test JWT token for development") + @PostMapping("/auth/token") + public ResponseEntity> generateTestToken( + @RequestParam(defaultValue = "test@example.com") String email, + @RequestParam(defaultValue = "default") String gatewayId) { + + LOGGER.info("Generating test token for email: {}, gatewayId: {}", email, gatewayId); + + try { + // For 
development, we'll create a simple token-like response + // In a real implementation, this would use JwtEncoder to create actual JWTs + + Instant now = Instant.now(); + String fakeToken = "dev-jwt-token-" + now.getEpochSecond() + "-" + email.hashCode(); + + Map response = new HashMap<>(); + response.put("access_token", fakeToken); + response.put("token_type", "Bearer"); + response.put("expires_in", 3600); + response.put("email", email); + response.put("gatewayID", gatewayId); + response.put("roles", Arrays.asList("USER")); + response.put("note", "This is a development-only token for testing purposes"); + + LOGGER.info("Generated test token for user: {}", email); + return ResponseEntity.ok(response); + + } catch (Exception e) { + LOGGER.error("Error generating test token: {}", e.getMessage(), e); + Map errorResponse = new HashMap<>(); + errorResponse.put("error", "token_generation_failed"); + errorResponse.put("error_description", e.getMessage()); + return ResponseEntity.internalServerError().body(errorResponse); + } + } + + @Operation(summary = "Get development API key information") + @PostMapping("/auth/api-key-info") + public ResponseEntity> getApiKeyInfo() { + LOGGER.info("Providing development API key information"); + + Map response = new HashMap<>(); + response.put("api_key_header", "X-API-Key"); + response.put("api_key_value", "dev-research-api-key-12345"); + response.put("note", "Use this API key in the X-API-Key header for development testing"); + response.put("example_curl", "curl -H \"X-API-Key: dev-research-api-key-12345\" http://localhost:8080/api/v2/rf/compute-resources/"); + + return ResponseEntity.ok(response); + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceDTO.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceDTO.java new file mode 100644 index 0000000000..a8098bc840 
--- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceDTO.java @@ -0,0 +1,287 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.airavata.research.service.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; +import java.util.ArrayList; +import java.util.List; + +/** + * UI-specific DTO for Compute Resource + * Maps to airavata-api ComputeResourceDescription with UI-specific extensions + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public class ComputeResourceDTO { + + // Core fields from ComputeResourceDescription + private String computeResourceId; + + @NotBlank(message = "Compute resource name is required") + @Size(max = 255, message = "Compute resource name must not exceed 255 characters") + private String name; + + @NotBlank(message = "Resource description is required") + @Size(max = 1000, message = "Resource description must not exceed 1000 characters") + private String resourceDescription; + + 
@NotBlank(message = "Hostname is required") + @Size(max = 255, message = "Hostname must not exceed 255 characters") + private String hostName; + + // UI-specific extensions stored in resourceDescription as JSON + @NotBlank(message = "Compute type is required") + @Size(max = 100, message = "Compute type must not exceed 100 characters") + private String computeType; // HPC, Cloud, Local, etc. + + @NotNull(message = "CPU cores is required") + @Min(value = 1, message = "CPU cores must be at least 1") + private Integer cpuCores; + + @NotNull(message = "Memory GB is required") + @Min(value = 1, message = "Memory GB must be at least 1") + private Integer memoryGB; + + @NotBlank(message = "Operating system is required") + @Size(max = 100, message = "Operating system must not exceed 100 characters") + private String operatingSystem; + + // Queue system is represented by BatchQueue entities, not a simple string + // Keeping for backward compatibility but making optional + private String queueSystem; // SLURM, PBS, SGE, etc. + + private String additionalInfo; + + // Resource manager type handled by ResourceJobManager entity + // Keeping for backward compatibility but making optional + private String resourceManager; // Gateway name or organization + + // Direct mappings from ComputeResourceDescription + private List hostAliases = new ArrayList<>(); + private List ipAddresses = new ArrayList<>(); + + // SSH configuration fields (mapped from JobSubmissionInterface -> SSHJobSubmission) + private Integer sshPort; // Optional - defaults to 22 if not specified + private String alternativeSSHHostName; // Optional alternative hostname + private String securityProtocol; // SSH_KEYS, USERNAME_PASSWORD (from SecurityProtocol enum) + + // Job management fields (mapped from JobSubmissionInterface -> SSHJobSubmission -> ResourceJobManager) + private String resourceJobManagerType; // PBS, SLURM, UGE, etc. 
(from ResourceJobManagerType enum) + + // Data movement fields (mapped from DataMovementInterface) + private String dataMovementProtocol; // SCP, SFTP, GRIDFTP, etc. (from DataMovementProtocol enum) + + // Queue management + private List queues = new ArrayList<>(); + + // System fields + private boolean enabled = true; + private Long creationTime; + private Long updateTime; + + // Default constructor + public ComputeResourceDTO() {} + + // Constructor for mapping from existing data + public ComputeResourceDTO(String computeResourceId, String hostName, String resourceDescription) { + this.computeResourceId = computeResourceId; + this.hostName = hostName; + this.resourceDescription = resourceDescription; + } + + // Getters and Setters + public String getComputeResourceId() { + return computeResourceId; + } + + public void setComputeResourceId(String computeResourceId) { + this.computeResourceId = computeResourceId; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getResourceDescription() { + return resourceDescription; + } + + public void setResourceDescription(String resourceDescription) { + this.resourceDescription = resourceDescription; + } + + public String getHostName() { + return hostName; + } + + public void setHostName(String hostName) { + this.hostName = hostName; + } + + public String getComputeType() { + return computeType; + } + + public void setComputeType(String computeType) { + this.computeType = computeType; + } + + public Integer getCpuCores() { + return cpuCores; + } + + public void setCpuCores(Integer cpuCores) { + this.cpuCores = cpuCores; + } + + public Integer getMemoryGB() { + return memoryGB; + } + + public void setMemoryGB(Integer memoryGB) { + this.memoryGB = memoryGB; + } + + public String getOperatingSystem() { + return operatingSystem; + } + + public void setOperatingSystem(String operatingSystem) { + this.operatingSystem = operatingSystem; + } + + public 
String getQueueSystem() { + return queueSystem; + } + + public void setQueueSystem(String queueSystem) { + this.queueSystem = queueSystem; + } + + public String getAdditionalInfo() { + return additionalInfo; + } + + public void setAdditionalInfo(String additionalInfo) { + this.additionalInfo = additionalInfo; + } + + public String getResourceManager() { + return resourceManager; + } + + public void setResourceManager(String resourceManager) { + this.resourceManager = resourceManager; + } + + public List getHostAliases() { + return hostAliases; + } + + public void setHostAliases(List hostAliases) { + this.hostAliases = hostAliases; + } + + public List getIpAddresses() { + return ipAddresses; + } + + public void setIpAddresses(List ipAddresses) { + this.ipAddresses = ipAddresses; + } + + public String getAlternativeSSHHostName() { + return alternativeSSHHostName; + } + + public void setAlternativeSSHHostName(String alternativeSSHHostName) { + this.alternativeSSHHostName = alternativeSSHHostName; + } + + public Integer getSshPort() { + return sshPort; + } + + public void setSshPort(Integer sshPort) { + this.sshPort = sshPort; + } + + public String getSecurityProtocol() { + return securityProtocol; + } + + public void setSecurityProtocol(String securityProtocol) { + this.securityProtocol = securityProtocol; + } + + public String getResourceJobManagerType() { + return resourceJobManagerType; + } + + public void setResourceJobManagerType(String resourceJobManagerType) { + this.resourceJobManagerType = resourceJobManagerType; + } + + public String getDataMovementProtocol() { + return dataMovementProtocol; + } + + public void setDataMovementProtocol(String dataMovementProtocol) { + this.dataMovementProtocol = dataMovementProtocol; + } + + public List getQueues() { + return queues; + } + + public void setQueues(List queues) { + this.queues = queues; + } + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = 
enabled; + } + + public Long getCreationTime() { + return creationTime; + } + + public void setCreationTime(Long creationTime) { + this.creationTime = creationTime; + } + + public Long getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceQueueDTO.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceQueueDTO.java new file mode 100644 index 0000000000..ce89fe495a --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/ComputeResourceQueueDTO.java @@ -0,0 +1,185 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.airavata.research.service.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.Size; + +/** + * UI-specific DTO for Compute Resource Queue + * Maps directly to airavata-api BatchQueue model + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public class ComputeResourceQueueDTO { + + @NotBlank(message = "Queue name is required") + @Size(max = 255, message = "Queue name must not exceed 255 characters") + private String queueName; + + @Size(max = 1000, message = "Queue description must not exceed 1000 characters") + private String queueDescription; + + @Min(value = 1, message = "Max run time must be at least 1 hour") + private Integer maxRunTime; + + @Min(value = 1, message = "Max nodes must be at least 1") + private Integer maxNodes; + + @Min(value = 1, message = "Max processors must be at least 1") + private Integer maxProcessors; + + @Min(value = 1, message = "Max jobs in queue must be at least 1") + private Integer maxJobsInQueue; + + @Min(value = 1, message = "Max memory must be at least 1") + private Integer maxMemory; + + @Min(value = 1, message = "CPUs per node must be at least 1") + private Integer cpusPerNode; + + @Min(value = 1, message = "Default node count must be at least 1") + private Integer defaultNodeCount; + + @Min(value = 1, message = "Default CPU count must be at least 1") + private Integer defaultCpuCount; + + @Min(value = 1, message = "Default wall time must be at least 1") + private Integer defaultWallTime; + + @Size(max = 1000, message = "Queue specific macros must not exceed 1000 characters") + private String queueSpecificMacros; + + private Boolean isDefaultQueue = false; + + // Default constructor + public ComputeResourceQueueDTO() {} + + // Constructor for quick creation + public ComputeResourceQueueDTO(String queueName, String queueDescription) { + this.queueName = queueName; + 
this.queueDescription = queueDescription; + } + + // Getters and Setters + public String getQueueName() { + return queueName; + } + + public void setQueueName(String queueName) { + this.queueName = queueName; + } + + public String getQueueDescription() { + return queueDescription; + } + + public void setQueueDescription(String queueDescription) { + this.queueDescription = queueDescription; + } + + public Integer getMaxRunTime() { + return maxRunTime; + } + + public void setMaxRunTime(Integer maxRunTime) { + this.maxRunTime = maxRunTime; + } + + public Integer getMaxNodes() { + return maxNodes; + } + + public void setMaxNodes(Integer maxNodes) { + this.maxNodes = maxNodes; + } + + public Integer getMaxProcessors() { + return maxProcessors; + } + + public void setMaxProcessors(Integer maxProcessors) { + this.maxProcessors = maxProcessors; + } + + public Integer getMaxJobsInQueue() { + return maxJobsInQueue; + } + + public void setMaxJobsInQueue(Integer maxJobsInQueue) { + this.maxJobsInQueue = maxJobsInQueue; + } + + public Integer getMaxMemory() { + return maxMemory; + } + + public void setMaxMemory(Integer maxMemory) { + this.maxMemory = maxMemory; + } + + public Integer getCpusPerNode() { + return cpusPerNode; + } + + public void setCpusPerNode(Integer cpusPerNode) { + this.cpusPerNode = cpusPerNode; + } + + public Integer getDefaultNodeCount() { + return defaultNodeCount; + } + + public void setDefaultNodeCount(Integer defaultNodeCount) { + this.defaultNodeCount = defaultNodeCount; + } + + public Integer getDefaultCpuCount() { + return defaultCpuCount; + } + + public void setDefaultCpuCount(Integer defaultCpuCount) { + this.defaultCpuCount = defaultCpuCount; + } + + public Integer getDefaultWallTime() { + return defaultWallTime; + } + + public void setDefaultWallTime(Integer defaultWallTime) { + this.defaultWallTime = defaultWallTime; + } + + public String getQueueSpecificMacros() { + return queueSpecificMacros; + } + + public void setQueueSpecificMacros(String 
queueSpecificMacros) { + this.queueSpecificMacros = queueSpecificMacros; + } + + public Boolean getIsDefaultQueue() { + return isDefaultQueue; + } + + public void setIsDefaultQueue(Boolean isDefaultQueue) { + this.isDefaultQueue = isDefaultQueue; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/StorageResourceDTO.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/StorageResourceDTO.java new file mode 100644 index 0000000000..2f69f256e2 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/dto/StorageResourceDTO.java @@ -0,0 +1,330 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.airavata.research.service.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; + +/** + * UI-specific DTO for Storage Resource + * Maps to airavata-api StorageResourceDescription with UI-specific extensions + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public class StorageResourceDTO { + + // Core fields from StorageResourceDescription + private String storageResourceId; + + @NotBlank(message = "Storage resource name is required") + @Size(max = 255, message = "Storage resource name must not exceed 255 characters") + private String name; + + @NotBlank(message = "Hostname is required") + @Size(max = 255, message = "Hostname must not exceed 255 characters") + private String hostName; + + @Size(max = 1000, message = "Storage resource description must not exceed 1000 characters") + private String storageResourceDescription; + + // UI-specific extensions stored in storageResourceDescription as JSON + @NotBlank(message = "Storage type is required") + @Size(max = 100, message = "Storage type must not exceed 100 characters") + private String storageType; // S3, SCP, NFS, etc. + + @NotNull(message = "Capacity TB is required") + @Min(value = 1, message = "Capacity TB must be at least 1") + private Long capacityTB; + + @NotBlank(message = "Access protocol is required") + @Size(max = 100, message = "Access protocol must not exceed 100 characters") + private String accessProtocol; // S3, SFTP, NFS, HTTP, etc. 
+ + @NotBlank(message = "Endpoint is required") + @Size(max = 500, message = "Endpoint must not exceed 500 characters") + private String endpoint; // API endpoint or mount point + + private Boolean supportsEncryption = false; + private Boolean supportsVersioning = false; + + // S3-specific fields + @Size(max = 255, message = "Bucket name must not exceed 255 characters") + private String bucketName; + + @Size(max = 255, message = "Access key must not exceed 255 characters") + private String accessKey; + + @Size(max = 255, message = "Secret key must not exceed 255 characters") + private String secretKey; + + // SCP-specific fields + private Integer port; + + @Size(max = 255, message = "Username must not exceed 255 characters") + private String username; + + @Size(max = 50, message = "Authentication method must not exceed 50 characters") + private String authenticationMethod; // "SSH_KEY", "PASSWORD" + + private String sshKey; + + @Size(max = 500, message = "Remote path must not exceed 500 characters") + private String remotePath; + + private String additionalInfo; + + @NotBlank(message = "Resource manager is required") + @Size(max = 255, message = "Resource manager must not exceed 255 characters") + private String resourceManager; // Gateway name or organization + + // System fields + private boolean enabled = true; + private Long creationTime; + private Long updateTime; + + // Default constructor + public StorageResourceDTO() {} + + // Constructor for basic creation + public StorageResourceDTO(String hostName, String storageResourceDescription) { + this.hostName = hostName; + this.storageResourceDescription = storageResourceDescription; + } + + // S3-specific constructor + public StorageResourceDTO(String hostName, String storageResourceDescription, String storageType, + String endpoint, String bucketName, String accessKey, String secretKey, + String resourceManager) { + this.hostName = hostName; + this.storageResourceDescription = storageResourceDescription; + 
this.storageType = storageType; + this.endpoint = endpoint; + this.bucketName = bucketName; + this.accessKey = accessKey; + this.secretKey = secretKey; + this.resourceManager = resourceManager; + this.accessProtocol = "S3"; + this.supportsEncryption = true; + this.supportsVersioning = true; + } + + // SCP-specific constructor + public StorageResourceDTO(String hostName, String storageResourceDescription, String storageType, + Integer port, String username, String authenticationMethod, String sshKey, + String remotePath, String resourceManager) { + this.hostName = hostName; + this.storageResourceDescription = storageResourceDescription; + this.storageType = storageType; + this.port = port; + this.username = username; + this.authenticationMethod = authenticationMethod; + this.sshKey = sshKey; + this.remotePath = remotePath; + this.resourceManager = resourceManager; + this.accessProtocol = "SCP"; + this.endpoint = hostName; + } + + // Getters and Setters + public String getStorageResourceId() { + return storageResourceId; + } + + public void setStorageResourceId(String storageResourceId) { + this.storageResourceId = storageResourceId; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getHostName() { + return hostName; + } + + public void setHostName(String hostName) { + this.hostName = hostName; + } + + public String getStorageResourceDescription() { + return storageResourceDescription; + } + + public void setStorageResourceDescription(String storageResourceDescription) { + this.storageResourceDescription = storageResourceDescription; + } + + public String getStorageType() { + return storageType; + } + + public void setStorageType(String storageType) { + this.storageType = storageType; + } + + public Long getCapacityTB() { + return capacityTB; + } + + public void setCapacityTB(Long capacityTB) { + this.capacityTB = capacityTB; + } + + public String getAccessProtocol() { + return 
accessProtocol; + } + + public void setAccessProtocol(String accessProtocol) { + this.accessProtocol = accessProtocol; + } + + public String getEndpoint() { + return endpoint; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public Boolean getSupportsEncryption() { + return supportsEncryption; + } + + public void setSupportsEncryption(Boolean supportsEncryption) { + this.supportsEncryption = supportsEncryption; + } + + public Boolean getSupportsVersioning() { + return supportsVersioning; + } + + public void setSupportsVersioning(Boolean supportsVersioning) { + this.supportsVersioning = supportsVersioning; + } + + public String getBucketName() { + return bucketName; + } + + public void setBucketName(String bucketName) { + this.bucketName = bucketName; + } + + public String getAccessKey() { + return accessKey; + } + + public void setAccessKey(String accessKey) { + this.accessKey = accessKey; + } + + public String getSecretKey() { + return secretKey; + } + + public void setSecretKey(String secretKey) { + this.secretKey = secretKey; + } + + public Integer getPort() { + return port; + } + + public void setPort(Integer port) { + this.port = port; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getAuthenticationMethod() { + return authenticationMethod; + } + + public void setAuthenticationMethod(String authenticationMethod) { + this.authenticationMethod = authenticationMethod; + } + + public String getSshKey() { + return sshKey; + } + + public void setSshKey(String sshKey) { + this.sshKey = sshKey; + } + + public String getRemotePath() { + return remotePath; + } + + public void setRemotePath(String remotePath) { + this.remotePath = remotePath; + } + + public String getAdditionalInfo() { + return additionalInfo; + } + + public void setAdditionalInfo(String additionalInfo) { + this.additionalInfo = additionalInfo; + } + + public 
String getResourceManager() { + return resourceManager; + } + + public void setResourceManager(String resourceManager) { + this.resourceManager = resourceManager; + } + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public Long getCreationTime() { + return creationTime; + } + + public void setCreationTime(Long creationTime) { + this.creationTime = creationTime; + } + + public Long getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/enums/ResourceTypeEnum.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/enums/ResourceTypeEnum.java index f0054aa308..2ea4ac1da8 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/enums/ResourceTypeEnum.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/enums/ResourceTypeEnum.java @@ -23,7 +23,10 @@ public enum ResourceTypeEnum { NOTEBOOK("NOTEBOOK"), DATASET("DATASET"), REPOSITORY("REPOSITORY"), - MODEL("MODEL"); + MODEL("MODEL"), + CODE("CODE"), + COMPUTE_RESOURCE("COMPUTE_RESOURCE"), + STORAGE_RESOURCE("STORAGE_RESOURCE"); private String str; diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/handler/ComputeResourceHandler.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/handler/ComputeResourceHandler.java new file mode 100644 index 0000000000..0d38214d59 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/handler/ComputeResourceHandler.java @@ -0,0 +1,230 @@ +/** + * Licensed to the Apache Software 
package org.apache.airavata.research.service.handler;

import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourceEntity;
import org.apache.airavata.research.service.dto.ComputeResourceDTO;
import org.apache.airavata.research.service.repository.ComputeResourceRepository;
import org.apache.airavata.research.service.util.DTOConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

/**
 * Handler for compute-resource CRUD operations working directly against the
 * Airavata app_catalog database via local JPA entities (no registry round-trip).
 *
 * <p>All failures are surfaced as {@link RuntimeException}; "not found" errors
 * are thrown outside the broad catch blocks so their specific message reaches
 * the caller instead of being re-wrapped as a generic failure.
 */
@Component("computeResourceHandler")
public class ComputeResourceHandler {

    private static final Logger LOGGER = LoggerFactory.getLogger(ComputeResourceHandler.class);

    private final ComputeResourceRepository computeResourceRepository;
    private final DTOConverter dtoConverter;

    public ComputeResourceHandler(ComputeResourceRepository computeResourceRepository,
                                  DTOConverter dtoConverter) {
        this.computeResourceRepository = computeResourceRepository;
        this.dtoConverter = dtoConverter;
    }

    /**
     * Returns all enabled compute resources, newest first.
     *
     * @throws RuntimeException if the underlying query or conversion fails
     */
    public List<ComputeResourceDTO> getAllComputeResources() {
        LOGGER.info("Getting all compute resources from app_catalog");
        try {
            List<ComputeResourceDTO> dtos =
                    toDtos(computeResourceRepository.findAllEnabledOrderByCreationTime());
            LOGGER.info("Found {} compute resources from app_catalog", dtos.size());
            return dtos;
        } catch (Exception e) {
            LOGGER.error("Failed to get compute resources from app_catalog", e);
            throw new RuntimeException("Failed to get compute resources", e);
        }
    }

    /**
     * Searches enabled compute resources by host-name substring.
     * A null/blank keyword falls back to returning every enabled resource.
     *
     * @param keyword substring matched against HOST_NAME (trimmed before use)
     * @throws RuntimeException if the underlying query or conversion fails
     */
    public List<ComputeResourceDTO> searchComputeResources(String keyword) {
        LOGGER.info("Searching compute resources in app_catalog with keyword: {}", keyword);
        try {
            List<ComputeResourceEntity> entities;
            if (keyword == null || keyword.trim().isEmpty()) {
                entities = computeResourceRepository.findAllEnabledOrderByCreationTime();
            } else {
                entities = computeResourceRepository.findEnabledByHostNameContaining(keyword.trim());
            }
            List<ComputeResourceDTO> dtos = toDtos(entities);
            LOGGER.info("Found {} compute resources matching keyword '{}'", dtos.size(), keyword);
            return dtos;
        } catch (Exception e) {
            LOGGER.error("Failed to search compute resources in app_catalog", e);
            throw new RuntimeException("Failed to search compute resources", e);
        }
    }

    /**
     * Fetches a single compute resource by its ID.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when conversion fails
     */
    public ComputeResourceDTO getComputeResource(String computeResourceId) {
        LOGGER.info("Getting compute resource by ID from app_catalog: {}", computeResourceId);
        // Lookup happens outside the try so the specific "not found" exception is
        // NOT swallowed and re-wrapped as a generic "Failed to get" failure.
        ComputeResourceEntity entity = computeResourceRepository.findById(computeResourceId)
                .orElseThrow(() -> {
                    LOGGER.warn("Compute resource not found with ID: {}", computeResourceId);
                    return new RuntimeException("Compute resource not found with ID: " + computeResourceId);
                });
        try {
            ComputeResourceDTO dto = dtoConverter.computeEntityToDTO(entity);
            LOGGER.info("Found compute resource: {}", entity.getHostName());
            return dto;
        } catch (Exception e) {
            LOGGER.error("Failed to get compute resource by ID: {}", computeResourceId, e);
            throw new RuntimeException("Failed to get compute resource", e);
        }
    }

    /**
     * Creates a new compute resource; the ID, enabled flag and timestamps are
     * assigned here and any values supplied by the caller are overwritten.
     *
     * @return the persisted resource converted back to a DTO (with generated ID)
     * @throws RuntimeException if conversion or persistence fails
     */
    public ComputeResourceDTO createComputeResource(ComputeResourceDTO computeResourceDTO) {
        LOGGER.info("Creating compute resource in app_catalog: {}", computeResourceDTO.getHostName());
        try {
            ComputeResourceEntity entity = dtoConverter.computeResourceDTOToEntity(computeResourceDTO);

            // System-managed fields: single timestamp so creation == update time.
            Timestamp now = new Timestamp(System.currentTimeMillis());
            entity.setComputeResourceId(UUID.randomUUID().toString());
            entity.setEnabled((short) 1); // ENABLED is a numeric column on this entity
            entity.setCreationTime(now);
            entity.setUpdateTime(now);

            ComputeResourceEntity savedEntity = computeResourceRepository.save(entity);
            ComputeResourceDTO savedDTO = dtoConverter.computeEntityToDTO(savedEntity);

            LOGGER.info("Created compute resource in app_catalog with ID: {}", savedEntity.getComputeResourceId());
            return savedDTO;
        } catch (Exception e) {
            LOGGER.error("Failed to create compute resource in app_catalog", e);
            throw new RuntimeException("Failed to create compute resource", e);
        }
    }

    /**
     * Updates an existing compute resource, preserving its ID and creation time.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when conversion/persistence fails
     */
    public ComputeResourceDTO updateComputeResource(String computeResourceId, ComputeResourceDTO computeResourceDTO) {
        LOGGER.info("Updating compute resource in app_catalog: {}", computeResourceId);
        // Not-found check outside the try — see getComputeResource.
        ComputeResourceEntity existing = computeResourceRepository.findById(computeResourceId)
                .orElseThrow(() ->
                        new RuntimeException("Compute resource not found with ID: " + computeResourceId));
        try {
            ComputeResourceEntity updatedEntity = dtoConverter.computeResourceDTOToEntity(computeResourceDTO);

            // Preserve system-managed fields from the stored row.
            // NOTE(review): the enabled flag comes from the DTO conversion here;
            // confirm DTOConverter carries it over, otherwise updates may persist
            // the entity with enabled unset.
            updatedEntity.setComputeResourceId(computeResourceId);
            updatedEntity.setCreationTime(existing.getCreationTime());
            updatedEntity.setUpdateTime(new Timestamp(System.currentTimeMillis()));

            ComputeResourceEntity savedEntity = computeResourceRepository.save(updatedEntity);
            ComputeResourceDTO savedDTO = dtoConverter.computeEntityToDTO(savedEntity);

            LOGGER.info("Updated compute resource in app_catalog: {}", computeResourceId);
            return savedDTO;
        } catch (Exception e) {
            LOGGER.error("Failed to update compute resource in app_catalog: {}", computeResourceId, e);
            throw new RuntimeException("Failed to update compute resource", e);
        }
    }

    /**
     * Deletes a compute resource by ID.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when the delete itself fails
     */
    public void deleteComputeResource(String computeResourceId) {
        LOGGER.info("Deleting compute resource from app_catalog: {}", computeResourceId);
        // Existence check outside the try so "not found" is not re-wrapped.
        if (!computeResourceRepository.existsById(computeResourceId)) {
            throw new RuntimeException("Compute resource not found with ID: " + computeResourceId);
        }
        try {
            computeResourceRepository.deleteById(computeResourceId);
            LOGGER.info("Deleted compute resource from app_catalog: {}", computeResourceId);
        } catch (Exception e) {
            LOGGER.error("Failed to delete compute resource from app_catalog: {}", computeResourceId, e);
            throw new RuntimeException("Failed to delete compute resource", e);
        }
    }

    /**
     * Checks whether a compute resource exists.
     * Never throws: repository failures are logged and reported as {@code false}.
     */
    public boolean existsComputeResource(String computeResourceId) {
        LOGGER.debug("Checking if compute resource exists in app_catalog: {}", computeResourceId);
        try {
            boolean exists = computeResourceRepository.existsById(computeResourceId);
            LOGGER.debug("Compute resource {} exists: {}", computeResourceId, exists);
            return exists;
        } catch (Exception e) {
            LOGGER.error("Failed to check compute resource existence in app_catalog: {}", computeResourceId, e);
            return false;
        }
    }

    /** Maps a list of entities to DTOs; shared by the list/search endpoints. */
    private List<ComputeResourceDTO> toDtos(List<ComputeResourceEntity> entities) {
        List<ComputeResourceDTO> dtos = new ArrayList<>(entities.size());
        for (ComputeResourceEntity entity : entities) {
            dtos.add(dtoConverter.computeEntityToDTO(entity));
        }
        return dtos;
    }
}
package org.apache.airavata.research.service.handler;

import org.apache.airavata.registry.core.entities.appcatalog.StorageResourceEntity;
import org.apache.airavata.research.service.dto.StorageResourceDTO;
import org.apache.airavata.research.service.repository.StorageResourceRepository;
import org.apache.airavata.research.service.util.DTOConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

/**
 * Handler for storage-resource CRUD operations working directly against the
 * Airavata app_catalog database via local JPA entities (no registry round-trip).
 *
 * <p>All failures are surfaced as {@link RuntimeException}; "not found" errors
 * are thrown outside the broad catch blocks so their specific message reaches
 * the caller instead of being re-wrapped as a generic failure.
 */
@Component("storageResourceHandler")
public class StorageResourceHandler {

    private static final Logger LOGGER = LoggerFactory.getLogger(StorageResourceHandler.class);

    private final StorageResourceRepository storageResourceRepository;
    private final DTOConverter dtoConverter;

    public StorageResourceHandler(StorageResourceRepository storageResourceRepository,
                                  DTOConverter dtoConverter) {
        this.storageResourceRepository = storageResourceRepository;
        this.dtoConverter = dtoConverter;
    }

    /**
     * Returns all enabled storage resources, newest first.
     *
     * @throws RuntimeException if the underlying query or conversion fails
     */
    public List<StorageResourceDTO> getAllStorageResources() {
        LOGGER.info("Getting all storage resources from app_catalog");
        try {
            List<StorageResourceDTO> dtos =
                    toDtos(storageResourceRepository.findAllEnabledOrderByCreationTime());
            LOGGER.info("Found {} storage resources from app_catalog", dtos.size());
            return dtos;
        } catch (Exception e) {
            LOGGER.error("Failed to get storage resources from app_catalog", e);
            throw new RuntimeException("Failed to get storage resources", e);
        }
    }

    /**
     * Searches enabled storage resources by host-name substring.
     * A null/blank keyword falls back to returning every enabled resource.
     *
     * @param keyword substring matched against HOST_NAME (trimmed before use)
     * @throws RuntimeException if the underlying query or conversion fails
     */
    public List<StorageResourceDTO> searchStorageResources(String keyword) {
        LOGGER.info("Searching storage resources in app_catalog with keyword: {}", keyword);
        try {
            List<StorageResourceEntity> entities;
            if (keyword == null || keyword.trim().isEmpty()) {
                entities = storageResourceRepository.findAllEnabledOrderByCreationTime();
            } else {
                entities = storageResourceRepository.findEnabledByHostNameContaining(keyword.trim());
            }
            List<StorageResourceDTO> dtos = toDtos(entities);
            LOGGER.info("Found {} storage resources matching keyword '{}'", dtos.size(), keyword);
            return dtos;
        } catch (Exception e) {
            LOGGER.error("Failed to search storage resources in app_catalog", e);
            throw new RuntimeException("Failed to search storage resources", e);
        }
    }

    /**
     * Fetches a single storage resource by its ID.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when conversion fails
     */
    public StorageResourceDTO getStorageResource(String storageResourceId) {
        LOGGER.info("Getting storage resource by ID from app_catalog: {}", storageResourceId);
        // Lookup happens outside the try so the specific "not found" exception is
        // NOT swallowed and re-wrapped as a generic "Failed to get" failure.
        StorageResourceEntity entity = storageResourceRepository.findById(storageResourceId)
                .orElseThrow(() -> {
                    LOGGER.warn("Storage resource not found with ID: {}", storageResourceId);
                    return new RuntimeException("Storage resource not found with ID: " + storageResourceId);
                });
        try {
            StorageResourceDTO dto = dtoConverter.storageEntityToDTO(entity);
            LOGGER.info("Found storage resource: {}", entity.getHostName());
            return dto;
        } catch (Exception e) {
            LOGGER.error("Failed to get storage resource by ID: {}", storageResourceId, e);
            throw new RuntimeException("Failed to get storage resource", e);
        }
    }

    /**
     * Creates a new storage resource; the ID, enabled flag and timestamps are
     * assigned here and any values supplied by the caller are overwritten.
     *
     * @return the persisted resource converted back to a DTO (with generated ID)
     * @throws RuntimeException if conversion or persistence fails
     */
    public StorageResourceDTO createStorageResource(StorageResourceDTO storageResourceDTO) {
        LOGGER.info("Creating storage resource in app_catalog: {}", storageResourceDTO.getHostName());
        try {
            StorageResourceEntity entity = dtoConverter.storageResourceDTOToEntity(storageResourceDTO);

            // System-managed fields: single timestamp so creation == update time.
            Timestamp now = new Timestamp(System.currentTimeMillis());
            entity.setStorageResourceId(UUID.randomUUID().toString());
            entity.setEnabled(true); // ENABLED is a boolean column on this entity
            entity.setCreationTime(now);
            entity.setUpdateTime(now);

            StorageResourceEntity savedEntity = storageResourceRepository.save(entity);
            StorageResourceDTO savedDTO = dtoConverter.storageEntityToDTO(savedEntity);

            LOGGER.info("Created storage resource in app_catalog with ID: {}", savedEntity.getStorageResourceId());
            return savedDTO;
        } catch (Exception e) {
            LOGGER.error("Failed to create storage resource in app_catalog", e);
            throw new RuntimeException("Failed to create storage resource", e);
        }
    }

    /**
     * Updates an existing storage resource, preserving its ID and creation time.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when conversion/persistence fails
     */
    public StorageResourceDTO updateStorageResource(String storageResourceId, StorageResourceDTO storageResourceDTO) {
        LOGGER.info("Updating storage resource in app_catalog: {}", storageResourceId);
        // Not-found check outside the try — see getStorageResource.
        StorageResourceEntity existing = storageResourceRepository.findById(storageResourceId)
                .orElseThrow(() ->
                        new RuntimeException("Storage resource not found with ID: " + storageResourceId));
        try {
            StorageResourceEntity updatedEntity = dtoConverter.storageResourceDTOToEntity(storageResourceDTO);

            // Preserve system-managed fields from the stored row.
            // NOTE(review): the enabled flag comes from the DTO conversion here;
            // confirm DTOConverter carries it over, otherwise updates may persist
            // the entity with enabled unset.
            updatedEntity.setStorageResourceId(storageResourceId);
            updatedEntity.setCreationTime(existing.getCreationTime());
            updatedEntity.setUpdateTime(new Timestamp(System.currentTimeMillis()));

            StorageResourceEntity savedEntity = storageResourceRepository.save(updatedEntity);
            StorageResourceDTO savedDTO = dtoConverter.storageEntityToDTO(savedEntity);

            LOGGER.info("Updated storage resource in app_catalog: {}", storageResourceId);
            return savedDTO;
        } catch (Exception e) {
            LOGGER.error("Failed to update storage resource in app_catalog: {}", storageResourceId, e);
            throw new RuntimeException("Failed to update storage resource", e);
        }
    }

    /**
     * Deletes a storage resource by ID.
     *
     * @throws RuntimeException with a "not found" message when the ID is unknown,
     *         or a generic failure message when the delete itself fails
     */
    public void deleteStorageResource(String storageResourceId) {
        LOGGER.info("Deleting storage resource from app_catalog: {}", storageResourceId);
        // Existence check outside the try so "not found" is not re-wrapped.
        if (!storageResourceRepository.existsById(storageResourceId)) {
            throw new RuntimeException("Storage resource not found with ID: " + storageResourceId);
        }
        try {
            storageResourceRepository.deleteById(storageResourceId);
            LOGGER.info("Deleted storage resource from app_catalog: {}", storageResourceId);
        } catch (Exception e) {
            LOGGER.error("Failed to delete storage resource from app_catalog: {}", storageResourceId, e);
            throw new RuntimeException("Failed to delete storage resource", e);
        }
    }

    /**
     * Returns enabled storage resources whose UI-level storage type matches
     * {@code storageType} (case-insensitive). A null/empty type matches all.
     * Filtering is done in memory because the type lives in the JSON-encoded
     * UI fields of the description, not in a queryable column.
     *
     * @throws RuntimeException if the underlying query or conversion fails
     */
    public List<StorageResourceDTO> getStorageResourcesByType(String storageType) {
        LOGGER.info("Getting storage resources by type from app_catalog: {}", storageType);
        try {
            List<StorageResourceEntity> entities = storageResourceRepository.findAllEnabledOrderByCreationTime();
            List<StorageResourceDTO> dtos = new ArrayList<>();
            for (StorageResourceEntity entity : entities) {
                StorageResourceDTO dto = dtoConverter.storageEntityToDTO(entity);
                if (storageType == null || storageType.isEmpty()
                        || (dto.getStorageType() != null && dto.getStorageType().equalsIgnoreCase(storageType))) {
                    dtos.add(dto);
                }
            }
            LOGGER.info("Found {} storage resources of type '{}'", dtos.size(), storageType);
            return dtos;
        } catch (Exception e) {
            LOGGER.error("Failed to get storage resources by type from app_catalog", e);
            throw new RuntimeException("Failed to get storage resources by type", e);
        }
    }

    /**
     * Checks whether a storage resource exists.
     * Never throws: repository failures are logged and reported as {@code false}.
     */
    public boolean existsStorageResource(String storageResourceId) {
        LOGGER.debug("Checking if storage resource exists in app_catalog: {}", storageResourceId);
        try {
            boolean exists = storageResourceRepository.existsById(storageResourceId);
            LOGGER.debug("Storage resource {} exists: {}", storageResourceId, exists);
            return exists;
        } catch (Exception e) {
            LOGGER.error("Failed to check storage resource existence in app_catalog: {}", storageResourceId, e);
            return false;
        }
    }

    /** Maps a list of entities to DTOs; shared by the list/search endpoints. */
    private List<StorageResourceDTO> toDtos(List<StorageResourceEntity> entities) {
        List<StorageResourceDTO> dtos = new ArrayList<>(entities.size());
        for (StorageResourceEntity entity : entities) {
            dtos.add(dtoConverter.storageEntityToDTO(entity));
        }
        return dtos;
    }
}
Using provided ID for development.", authorId); + // For development, skip user validation and use the provided author ID + userSet.add(authorId); } } diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/model/entity/Tag.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/model/entity/Tag.java index e7ac271abd..a1c617efac 100644 --- a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/model/entity/Tag.java +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/model/entity/Tag.java @@ -19,6 +19,7 @@ */ package org.apache.airavata.research.service.model.entity; +import com.fasterxml.jackson.annotation.JsonProperty; import jakarta.persistence.Column; import jakarta.persistence.Entity; import jakarta.persistence.GeneratedValue; @@ -36,7 +37,8 @@ public class Tag { @Column(nullable = false, updatable = false, length = 48) private String id; - @Column(nullable = false) + @Column(name = "tag_value", nullable = false) + @JsonProperty("name") private String value; public String getId() { @@ -47,6 +49,7 @@ public void setId(String id) { this.id = id; } + @JsonProperty("name") public String getValue() { return value; } diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/repository/ComputeResourceRepository.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/repository/ComputeResourceRepository.java new file mode 100644 index 0000000000..b2db89c862 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/repository/ComputeResourceRepository.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
package org.apache.airavata.research.service.repository;

import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourceEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.List;

/**
 * Spring Data JPA repository over the app_catalog compute-resource table,
 * keyed by the string compute-resource ID.
 */
@Repository
public interface ComputeResourceRepository extends JpaRepository<ComputeResourceEntity, String> {

    /**
     * All enabled compute resources, newest first.
     * The enabled column is compared against 1 (not a boolean literal) —
     * ComputeResourceEntity stores it as a numeric flag (see the handler's
     * {@code setEnabled((short) 1)}).
     */
    @Query("SELECT c FROM ComputeResourceEntity c WHERE c.enabled = 1 ORDER BY c.creationTime DESC")
    List<ComputeResourceEntity> findAllEnabledOrderByCreationTime();

    /**
     * Enabled compute resources whose host name contains {@code hostname}
     * (JPQL LIKE with wildcards on both sides; case sensitivity follows the
     * database collation). Callers pass pre-trimmed keywords.
     */
    @Query("SELECT c FROM ComputeResourceEntity c WHERE c.enabled = 1 AND c.hostName LIKE %:hostname%")
    List<ComputeResourceEntity> findEnabledByHostNameContaining(@Param("hostname") String hostname);
}
package org.apache.airavata.research.service.repository;

import org.apache.airavata.registry.core.entities.appcatalog.StorageResourceEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.List;

/**
 * Spring Data JPA repository over the app_catalog storage-resource table,
 * keyed by the string storage-resource ID.
 */
@Repository
public interface StorageResourceRepository extends JpaRepository<StorageResourceEntity, String> {

    /**
     * All enabled storage resources, newest first.
     * Unlike the compute-resource query, the enabled column is boolean here —
     * StorageResourceEntity's flag is set with {@code setEnabled(true)} in the
     * handler, hence the {@code = true} comparison.
     */
    @Query("SELECT s FROM StorageResourceEntity s WHERE s.enabled = true ORDER BY s.creationTime DESC")
    List<StorageResourceEntity> findAllEnabledOrderByCreationTime();

    /**
     * Enabled storage resources whose host name contains {@code hostname}
     * (JPQL LIKE with wildcards on both sides; case sensitivity follows the
     * database collation). Callers pass pre-trimmed keywords.
     */
    @Query("SELECT s FROM StorageResourceEntity s WHERE s.enabled = true AND s.hostName LIKE %:hostname%")
    List<StorageResourceEntity> findEnabledByHostNameContaining(@Param("hostname") String hostname);
}
b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/service/UserContextService.java new file mode 100644 index 0000000000..23439af265 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/service/UserContextService.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.airavata.research.service.service; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.airavata.research.service.config.ApiKeyAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken; +import org.springframework.stereotype.Service; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +@Service +public class UserContextService { + + public String getCurrentUserId() { + Authentication auth = SecurityContextHolder.getContext().getAuthentication(); + + if (auth instanceof JwtAuthenticationToken) { + JwtAuthenticationToken jwtAuth = (JwtAuthenticationToken) auth; + return jwtAuth.getToken().getClaimAsString("email"); + } else if (auth instanceof ApiKeyAuthenticationToken) { + return "api-key-user"; + } + + return "anonymous"; + } + + public String getCurrentGatewayId() { + // Extract from X-Claims header or JWT + ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes(); + String claims = attr.getRequest().getHeader("X-Claims"); + + if (claims != null) { + try { + ObjectMapper mapper = new ObjectMapper(); + JsonNode node = mapper.readTree(claims); + JsonNode gatewayNode = node.get("gatewayID"); + if (gatewayNode != null) { + return gatewayNode.asText(); + } + } catch (Exception e) { + // Fall back to default + } + } + + return "default"; + } + + public boolean isAuthenticated() { + Authentication auth = SecurityContextHolder.getContext().getAuthentication(); + return auth != null && auth.isAuthenticated() && !"anonymous".equals(auth.getName()); + } + + public String getCurrentUserName() { + // Extract from X-Claims header first + 
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes(); + String claims = attr.getRequest().getHeader("X-Claims"); + + if (claims != null) { + try { + ObjectMapper mapper = new ObjectMapper(); + JsonNode node = mapper.readTree(claims); + JsonNode userNameNode = node.get("userName"); + if (userNameNode != null) { + return userNameNode.asText(); + } + } catch (Exception e) { + // Fall back to authentication context + } + } + + // Fall back to current user ID + return getCurrentUserId(); + } +} \ No newline at end of file diff --git a/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/util/DTOConverter.java b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/util/DTOConverter.java new file mode 100644 index 0000000000..ea45ebaea8 --- /dev/null +++ b/modules/research-framework/research-service/src/main/java/org/apache/airavata/research/service/util/DTOConverter.java @@ -0,0 +1,1002 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.airavata.research.service.util; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.*; +import java.util.stream.Collectors; +import org.apache.airavata.model.appcatalog.computeresource.BatchQueue; +import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription; +import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface; +import org.apache.airavata.model.data.movement.DataMovementInterface; +import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol; +import org.apache.airavata.model.data.movement.DataMovementProtocol; +import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription; +import org.apache.airavata.research.service.dto.ComputeResourceDTO; +import org.apache.airavata.research.service.dto.ComputeResourceQueueDTO; +import org.apache.airavata.research.service.dto.StorageResourceDTO; +import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourceEntity; +import org.apache.airavata.registry.core.entities.appcatalog.StorageResourceEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + + +/** + * Utility class for converting between entities and DTOs + * Handles JSON serialization of UI-specific fields into description fields + */ +@Component +public class DTOConverter { + + private static final Logger LOGGER = LoggerFactory.getLogger(DTOConverter.class); + private final ObjectMapper objectMapper = new ObjectMapper(); + + // JSON field names for UI-specific data + private static final String UI_FIELDS_KEY = "uiFields"; + private static final String COMPUTE_TYPE_KEY = "computeType"; + private static final String OPERATING_SYSTEM_KEY = "operatingSystem"; + private static final String QUEUE_SYSTEM_KEY = "queueSystem"; + private static final 
String ADDITIONAL_INFO_KEY = "additionalInfo"; + private static final String RESOURCE_MANAGER_KEY = "resourceManager"; + private static final String SSH_CONFIG_KEY = "sshConfig"; + private static final String SSH_USERNAME_KEY = "sshUsername"; + private static final String SSH_PORT_KEY = "sshPort"; + private static final String AUTH_METHOD_KEY = "authenticationMethod"; + private static final String SSH_KEY_KEY = "sshKey"; + private static final String WORKING_DIR_KEY = "workingDirectory"; + private static final String SCHEDULER_TYPE_KEY = "schedulerType"; + private static final String DATA_MOVEMENT_PROTOCOL_KEY = "dataMovementProtocol"; + + // Additional compute fields that need to be preserved + private static final String NAME_KEY = "name"; + private static final String HOST_ALIASES_KEY = "hostAliases"; + private static final String IP_ADDRESSES_KEY = "ipAddresses"; + private static final String QUEUES_KEY = "queues"; + + // Storage-specific field names + private static final String STORAGE_TYPE_KEY = "storageType"; + private static final String CAPACITY_TB_KEY = "capacityTB"; + private static final String ACCESS_PROTOCOL_KEY = "accessProtocol"; + private static final String ENDPOINT_KEY = "endpoint"; + private static final String SUPPORTS_ENCRYPTION_KEY = "supportsEncryption"; + private static final String SUPPORTS_VERSIONING_KEY = "supportsVersioning"; + private static final String S3_CONFIG_KEY = "s3Config"; + private static final String BUCKET_NAME_KEY = "bucketName"; + private static final String ACCESS_KEY_KEY = "accessKey"; + private static final String SECRET_KEY_KEY = "secretKey"; + private static final String SCP_CONFIG_KEY = "scpConfig"; + private static final String PORT_KEY = "port"; + private static final String USERNAME_KEY = "username"; + private static final String REMOTE_PATH_KEY = "remotePath"; + + /** + * Convert ComputeResourceDescription to ComputeResourceDTO + */ + public ComputeResourceDTO thriftToDTO(ComputeResourceDescription thriftModel) 
{ + if (thriftModel == null) { + return null; + } + + ComputeResourceDTO dto = new ComputeResourceDTO(); + + // Direct mappings + dto.setComputeResourceId(thriftModel.getComputeResourceId()); + dto.setHostName(thriftModel.getHostName()); + dto.setHostAliases(thriftModel.getHostAliases()); + dto.setIpAddresses(thriftModel.getIpAddresses()); + dto.setEnabled(thriftModel.isEnabled()); + + // Map memory (convert from MB to GB if needed) + if (thriftModel.isSetMaxMemoryPerNode()) { + dto.setMemoryGB(thriftModel.getMaxMemoryPerNode() / 1024); // Assuming thrift is in MB + } + + // Map CPU cores + if (thriftModel.isSetCpusPerNode()) { + dto.setCpuCores(thriftModel.getCpusPerNode()); + } + + // Extract UI-specific fields from resourceDescription JSON + parseResourceDescriptionForComputeResource(thriftModel.getResourceDescription(), dto); + + // Convert batch queues to queue DTOs + if (thriftModel.getBatchQueues() != null) { + dto.setQueues(thriftModel.getBatchQueues().stream() + .map(this::batchQueueToDTO) + .collect(Collectors.toList())); + } + + // Extract data movement protocol from DataMovementInterface + if (thriftModel.getDataMovementInterfaces() != null && !thriftModel.getDataMovementInterfaces().isEmpty()) { + // Get the first (highest priority) data movement interface + DataMovementInterface dmInterface = thriftModel.getDataMovementInterfaces().get(0); + if (dmInterface != null && dmInterface.getDataMovementProtocol() != null) { + dto.setDataMovementProtocol(dmInterface.getDataMovementProtocol().toString()); + } + } + + // Extract resource job manager type from JobSubmissionInterface + if (thriftModel.getJobSubmissionInterfaces() != null && !thriftModel.getJobSubmissionInterfaces().isEmpty()) { + // Get the first (highest priority) job submission interface + JobSubmissionInterface jsInterface = thriftModel.getJobSubmissionInterfaces().get(0); + if (jsInterface != null && jsInterface.getJobSubmissionProtocol() != null) { + 
dto.setResourceJobManagerType(jsInterface.getJobSubmissionProtocol().toString()); + } + } + + return dto; + } + + /** + * Convert ComputeResourceDTO to ComputeResourceDescription + */ + public ComputeResourceDescription dtoToThrift(ComputeResourceDTO dto) { + if (dto == null) { + return null; + } + + ComputeResourceDescription thriftModel = new ComputeResourceDescription(); + + // Direct mappings + thriftModel.setComputeResourceId(dto.getComputeResourceId()); + thriftModel.setHostName(dto.getHostName()); + thriftModel.setHostAliases(dto.getHostAliases()); + thriftModel.setIpAddresses(dto.getIpAddresses()); + thriftModel.setEnabled(dto.isEnabled()); + + // Map memory (convert from GB to MB) + if (dto.getMemoryGB() != null) { + thriftModel.setMaxMemoryPerNode(dto.getMemoryGB() * 1024); + } + + // Map CPU cores + if (dto.getCpuCores() != null) { + thriftModel.setCpusPerNode(dto.getCpuCores()); + } + + // Store UI-specific fields as JSON in resourceDescription + thriftModel.setResourceDescription(buildResourceDescriptionForComputeResource(dto)); + + // Convert queue DTOs to batch queues + if (dto.getQueues() != null) { + thriftModel.setBatchQueues(dto.getQueues().stream() + .map(this::dtoToBatchQueue) + .collect(Collectors.toList())); + } + + // Create DataMovementInterface if protocol is specified + if (dto.getDataMovementProtocol() != null && !dto.getDataMovementProtocol().trim().isEmpty()) { + List dataMovementInterfaces = new ArrayList<>(); + DataMovementInterface dmInterface = new DataMovementInterface(); + dmInterface.setDataMovementInterfaceId(generateInterfaceId("dm")); + dmInterface.setPriorityOrder(1); // Highest priority + try { + dmInterface.setDataMovementProtocol(DataMovementProtocol.valueOf(dto.getDataMovementProtocol())); + dataMovementInterfaces.add(dmInterface); + thriftModel.setDataMovementInterfaces(dataMovementInterfaces); + } catch (IllegalArgumentException e) { + LOGGER.warn("Invalid data movement protocol: " + dto.getDataMovementProtocol(), e); 
+ } + } + + // Create JobSubmissionInterface if protocol is specified + if (dto.getResourceJobManagerType() != null && !dto.getResourceJobManagerType().trim().isEmpty()) { + List jobSubmissionInterfaces = new ArrayList<>(); + JobSubmissionInterface jsInterface = new JobSubmissionInterface(); + jsInterface.setJobSubmissionInterfaceId(generateInterfaceId("js")); + jsInterface.setPriorityOrder(1); // Highest priority + try { + jsInterface.setJobSubmissionProtocol(JobSubmissionProtocol.valueOf(dto.getResourceJobManagerType())); + jobSubmissionInterfaces.add(jsInterface); + thriftModel.setJobSubmissionInterfaces(jobSubmissionInterfaces); + } catch (IllegalArgumentException e) { + LOGGER.warn("Invalid job submission protocol: " + dto.getResourceJobManagerType(), e); + } + } + + return thriftModel; + } + + /** + * Convert StorageResourceDescription to StorageResourceDTO + */ + public StorageResourceDTO thriftToDTO(StorageResourceDescription thriftModel) { + if (thriftModel == null) { + return null; + } + + StorageResourceDTO dto = new StorageResourceDTO(); + + // Direct mappings + dto.setStorageResourceId(thriftModel.getStorageResourceId()); + dto.setHostName(thriftModel.getHostName()); + dto.setEnabled(thriftModel.isEnabled()); + dto.setCreationTime(thriftModel.getCreationTime()); + dto.setUpdateTime(thriftModel.getUpdateTime()); + + // Extract UI-specific fields from storageResourceDescription JSON + parseResourceDescriptionForStorageResource(thriftModel.getStorageResourceDescription(), dto); + + return dto; + } + + /** + * Convert StorageResourceDTO to StorageResourceDescription + */ + public StorageResourceDescription dtoToThrift(StorageResourceDTO dto) { + if (dto == null) { + return null; + } + + StorageResourceDescription thriftModel = new StorageResourceDescription(); + + // Direct mappings + thriftModel.setStorageResourceId(dto.getStorageResourceId()); + thriftModel.setHostName(dto.getHostName()); + thriftModel.setEnabled(dto.isEnabled()); + + // Store 
UI-specific fields as JSON in storageResourceDescription + thriftModel.setStorageResourceDescription(buildResourceDescriptionForStorageResource(dto)); + + return thriftModel; + } + + /** + * Convert BatchQueue to ComputeResourceQueueDTO + */ + public ComputeResourceQueueDTO batchQueueToDTO(BatchQueue batchQueue) { + if (batchQueue == null) { + return null; + } + + ComputeResourceQueueDTO dto = new ComputeResourceQueueDTO(); + dto.setQueueName(batchQueue.getQueueName()); + dto.setQueueDescription(batchQueue.getQueueDescription()); + dto.setMaxRunTime(batchQueue.getMaxRunTime()); + dto.setMaxNodes(batchQueue.getMaxNodes()); + dto.setMaxProcessors(batchQueue.getMaxProcessors()); + dto.setMaxJobsInQueue(batchQueue.getMaxJobsInQueue()); + dto.setMaxMemory(batchQueue.getMaxMemory()); + dto.setCpusPerNode(batchQueue.getCpuPerNode()); + dto.setDefaultNodeCount(batchQueue.getDefaultNodeCount()); + dto.setDefaultCpuCount(batchQueue.getDefaultCPUCount()); + dto.setDefaultWallTime(batchQueue.getDefaultWalltime()); + dto.setQueueSpecificMacros(batchQueue.getQueueSpecificMacros()); + dto.setIsDefaultQueue(batchQueue.isIsDefaultQueue()); + + return dto; + } + + /** + * Convert ComputeResourceQueueDTO to BatchQueue + */ + public BatchQueue dtoToBatchQueue(ComputeResourceQueueDTO dto) { + if (dto == null) { + return null; + } + + BatchQueue batchQueue = new BatchQueue(); + batchQueue.setQueueName(dto.getQueueName()); + batchQueue.setQueueDescription(dto.getQueueDescription()); + batchQueue.setMaxRunTime(dto.getMaxRunTime() != null ? dto.getMaxRunTime() : 0); + batchQueue.setMaxNodes(dto.getMaxNodes() != null ? dto.getMaxNodes() : 0); + batchQueue.setMaxProcessors(dto.getMaxProcessors() != null ? dto.getMaxProcessors() : 0); + batchQueue.setMaxJobsInQueue(dto.getMaxJobsInQueue() != null ? dto.getMaxJobsInQueue() : 0); + batchQueue.setMaxMemory(dto.getMaxMemory() != null ? dto.getMaxMemory() : 0); + batchQueue.setCpuPerNode(dto.getCpusPerNode() != null ? 
dto.getCpusPerNode() : 0); + batchQueue.setDefaultNodeCount(dto.getDefaultNodeCount() != null ? dto.getDefaultNodeCount() : 0); + batchQueue.setDefaultCPUCount(dto.getDefaultCpuCount() != null ? dto.getDefaultCpuCount() : 0); + batchQueue.setDefaultWalltime(dto.getDefaultWallTime() != null ? dto.getDefaultWallTime() : 0); + batchQueue.setQueueSpecificMacros(dto.getQueueSpecificMacros()); + batchQueue.setIsDefaultQueue(dto.getIsDefaultQueue() != null ? dto.getIsDefaultQueue() : false); + + return batchQueue; + } + + /** + * Parse JSON from resourceDescription and populate ComputeResourceDTO UI fields + */ + private void parseResourceDescriptionForComputeResource(String resourceDescription, ComputeResourceDTO dto) { + if (resourceDescription == null || resourceDescription.trim().isEmpty()) { + return; + } + + try { + JsonNode rootNode = objectMapper.readTree(resourceDescription); + JsonNode uiFieldsNode = rootNode.get(UI_FIELDS_KEY); + + if (uiFieldsNode != null) { + // Extract UI-specific fields + dto.setComputeType(getStringValue(uiFieldsNode, COMPUTE_TYPE_KEY)); + dto.setOperatingSystem(getStringValue(uiFieldsNode, OPERATING_SYSTEM_KEY)); + dto.setQueueSystem(getStringValue(uiFieldsNode, QUEUE_SYSTEM_KEY)); + dto.setAdditionalInfo(getStringValue(uiFieldsNode, ADDITIONAL_INFO_KEY)); + dto.setResourceManager(getStringValue(uiFieldsNode, RESOURCE_MANAGER_KEY)); + dto.setResourceJobManagerType(getStringValue(uiFieldsNode, SCHEDULER_TYPE_KEY)); // Map old schedulerType to new field + dto.setDataMovementProtocol(getStringValue(uiFieldsNode, DATA_MOVEMENT_PROTOCOL_KEY)); + + // Extract SSH configuration + JsonNode sshConfigNode = uiFieldsNode.get(SSH_CONFIG_KEY); + if (sshConfigNode != null) { + dto.setAlternativeSSHHostName(getStringValue(sshConfigNode, SSH_USERNAME_KEY)); // Repurpose for alternative hostname + dto.setSshPort(getIntegerValue(sshConfigNode, SSH_PORT_KEY)); + dto.setSecurityProtocol(getStringValue(sshConfigNode, AUTH_METHOD_KEY)); // Map to 
securityProtocol + } + } + + // Set basic description (without UI fields) + JsonNode basicDescNode = rootNode.get("description"); + if (basicDescNode != null) { + dto.setResourceDescription(basicDescNode.asText()); + } + + } catch (JsonProcessingException e) { + LOGGER.warn("Failed to parse resourceDescription JSON, treating as plain text: {}", e.getMessage()); + dto.setResourceDescription(resourceDescription); + } + } + + /** + * Build JSON resourceDescription from ComputeResourceDTO + */ + private String buildResourceDescriptionForComputeResource(ComputeResourceDTO dto) { + Map rootMap = new HashMap<>(); + + // Basic description + if (dto.getResourceDescription() != null) { + rootMap.put("description", dto.getResourceDescription()); + } + + // UI-specific fields + Map uiFields = new HashMap<>(); + uiFields.put(COMPUTE_TYPE_KEY, dto.getComputeType()); + uiFields.put(OPERATING_SYSTEM_KEY, dto.getOperatingSystem()); + uiFields.put(QUEUE_SYSTEM_KEY, dto.getQueueSystem()); + uiFields.put(ADDITIONAL_INFO_KEY, dto.getAdditionalInfo()); + uiFields.put(RESOURCE_MANAGER_KEY, dto.getResourceManager()); + uiFields.put(SCHEDULER_TYPE_KEY, dto.getResourceJobManagerType()); // Use new field name + uiFields.put(DATA_MOVEMENT_PROTOCOL_KEY, dto.getDataMovementProtocol()); + + // SSH configuration + Map sshConfig = new HashMap<>(); + sshConfig.put(SSH_USERNAME_KEY, dto.getAlternativeSSHHostName()); // Repurposed field + sshConfig.put(SSH_PORT_KEY, dto.getSshPort()); + sshConfig.put(AUTH_METHOD_KEY, dto.getSecurityProtocol()); // Use new field name + uiFields.put(SSH_CONFIG_KEY, sshConfig); + + rootMap.put(UI_FIELDS_KEY, uiFields); + + try { + return objectMapper.writeValueAsString(rootMap); + } catch (JsonProcessingException e) { + LOGGER.error("Failed to serialize UI fields to JSON", e); + return dto.getResourceDescription() != null ? 
dto.getResourceDescription() : ""; + } + } + + /** + * Parse JSON from storageResourceDescription and populate StorageResourceDTO UI fields + */ + private void parseResourceDescriptionForStorageResource(String storageResourceDescription, StorageResourceDTO dto) { + if (storageResourceDescription == null || storageResourceDescription.trim().isEmpty()) { + return; + } + + try { + JsonNode rootNode = objectMapper.readTree(storageResourceDescription); + JsonNode uiFieldsNode = rootNode.get(UI_FIELDS_KEY); + + if (uiFieldsNode != null) { + // Extract UI-specific fields + dto.setStorageType(getStringValue(uiFieldsNode, STORAGE_TYPE_KEY)); + dto.setCapacityTB(getLongValue(uiFieldsNode, CAPACITY_TB_KEY)); + dto.setAccessProtocol(getStringValue(uiFieldsNode, ACCESS_PROTOCOL_KEY)); + dto.setEndpoint(getStringValue(uiFieldsNode, ENDPOINT_KEY)); + dto.setSupportsEncryption(getBooleanValue(uiFieldsNode, SUPPORTS_ENCRYPTION_KEY)); + dto.setSupportsVersioning(getBooleanValue(uiFieldsNode, SUPPORTS_VERSIONING_KEY)); + dto.setAdditionalInfo(getStringValue(uiFieldsNode, ADDITIONAL_INFO_KEY)); + dto.setResourceManager(getStringValue(uiFieldsNode, RESOURCE_MANAGER_KEY)); + + // Extract S3 configuration + JsonNode s3ConfigNode = uiFieldsNode.get(S3_CONFIG_KEY); + if (s3ConfigNode != null) { + dto.setBucketName(getStringValue(s3ConfigNode, BUCKET_NAME_KEY)); + dto.setAccessKey(getStringValue(s3ConfigNode, ACCESS_KEY_KEY)); + dto.setSecretKey(getStringValue(s3ConfigNode, SECRET_KEY_KEY)); + } + + // Extract SCP configuration + JsonNode scpConfigNode = uiFieldsNode.get(SCP_CONFIG_KEY); + if (scpConfigNode != null) { + dto.setPort(getIntegerValue(scpConfigNode, PORT_KEY)); + dto.setUsername(getStringValue(scpConfigNode, USERNAME_KEY)); + dto.setAuthenticationMethod(getStringValue(scpConfigNode, AUTH_METHOD_KEY)); + dto.setSshKey(getStringValue(scpConfigNode, SSH_KEY_KEY)); + dto.setRemotePath(getStringValue(scpConfigNode, REMOTE_PATH_KEY)); + } + } + + // Set basic description (without UI 
fields) + JsonNode basicDescNode = rootNode.get("description"); + if (basicDescNode != null) { + dto.setStorageResourceDescription(basicDescNode.asText()); + } + + } catch (JsonProcessingException e) { + LOGGER.warn("Failed to parse storageResourceDescription JSON, treating as plain text: {}", e.getMessage()); + dto.setStorageResourceDescription(storageResourceDescription); + } + } + + /** + * Build JSON storageResourceDescription from StorageResourceDTO + */ + private String buildResourceDescriptionForStorageResource(StorageResourceDTO dto) { + Map rootMap = new HashMap<>(); + + // Basic description + if (dto.getStorageResourceDescription() != null) { + rootMap.put("description", dto.getStorageResourceDescription()); + } + + // UI-specific fields + Map uiFields = new HashMap<>(); + uiFields.put(STORAGE_TYPE_KEY, dto.getStorageType()); + uiFields.put(CAPACITY_TB_KEY, dto.getCapacityTB()); + uiFields.put(ACCESS_PROTOCOL_KEY, dto.getAccessProtocol()); + uiFields.put(ENDPOINT_KEY, dto.getEndpoint()); + uiFields.put(SUPPORTS_ENCRYPTION_KEY, dto.getSupportsEncryption()); + uiFields.put(SUPPORTS_VERSIONING_KEY, dto.getSupportsVersioning()); + uiFields.put(ADDITIONAL_INFO_KEY, dto.getAdditionalInfo()); + uiFields.put(RESOURCE_MANAGER_KEY, dto.getResourceManager()); + + // S3 configuration + if ("S3".equalsIgnoreCase(dto.getStorageType())) { + Map s3Config = new HashMap<>(); + s3Config.put(BUCKET_NAME_KEY, dto.getBucketName()); + s3Config.put(ACCESS_KEY_KEY, dto.getAccessKey()); + s3Config.put(SECRET_KEY_KEY, dto.getSecretKey()); + uiFields.put(S3_CONFIG_KEY, s3Config); + } + + // SCP configuration + if ("SCP".equalsIgnoreCase(dto.getStorageType())) { + Map scpConfig = new HashMap<>(); + scpConfig.put(PORT_KEY, dto.getPort()); + scpConfig.put(USERNAME_KEY, dto.getUsername()); + scpConfig.put(AUTH_METHOD_KEY, dto.getAuthenticationMethod()); + scpConfig.put(SSH_KEY_KEY, dto.getSshKey()); + scpConfig.put(REMOTE_PATH_KEY, dto.getRemotePath()); + uiFields.put(SCP_CONFIG_KEY, 
scpConfig); + } + + rootMap.put(UI_FIELDS_KEY, uiFields); + + try { + return objectMapper.writeValueAsString(rootMap); + } catch (JsonProcessingException e) { + LOGGER.error("Failed to serialize UI fields to JSON", e); + return dto.getStorageResourceDescription() != null ? dto.getStorageResourceDescription() : ""; + } + } + + // Helper methods for extracting values from JSON nodes + private String getStringValue(JsonNode node, String key) { + JsonNode valueNode = node.get(key); + return valueNode != null && !valueNode.isNull() ? valueNode.asText() : null; + } + + private Integer getIntegerValue(JsonNode node, String key) { + JsonNode valueNode = node.get(key); + return valueNode != null && !valueNode.isNull() ? valueNode.asInt() : null; + } + + private Long getLongValue(JsonNode node, String key) { + JsonNode valueNode = node.get(key); + return valueNode != null && !valueNode.isNull() ? valueNode.asLong() : null; + } + + private Boolean getBooleanValue(JsonNode node, String key) { + JsonNode valueNode = node.get(key); + return valueNode != null && !valueNode.isNull() ? 
valueNode.asBoolean() : null; + } + + // =============================== + // JPA Entity Conversion Methods + // =============================== + + /** + * Convert StorageResourceEntity (JPA) to StorageResourceDTO + */ + public StorageResourceDTO storageEntityToDTO(StorageResourceEntity entity) { + if (entity == null) { + return null; + } + + StorageResourceDTO dto = new StorageResourceDTO(); + + // Core fields + dto.setStorageResourceId(entity.getStorageResourceId()); + dto.setHostName(entity.getHostName()); + dto.setStorageResourceDescription(entity.getStorageResourceDescription()); + // Handle enabled field - boolean type from database + dto.setEnabled(entity.isEnabled()); + + // Extract name from UI fields or generate fallback + String extractedName = extractNameFromStorageDescription(entity.getStorageResourceDescription()); + if (extractedName != null && !extractedName.trim().isEmpty()) { + dto.setName(extractedName); + } else { + // Generate name from hostname and description + dto.setName(generateStorageResourceName(entity.getHostName(), entity.getStorageResourceDescription())); + } + + // Timestamps + if (entity.getCreationTime() != null) { + dto.setCreationTime(entity.getCreationTime().getTime()); + } + if (entity.getUpdateTime() != null) { + dto.setUpdateTime(entity.getUpdateTime().getTime()); + } + + // Extract UI-specific fields from JSON stored in description + extractStorageUIFieldsFromDescription(entity.getStorageResourceDescription(), dto); + + return dto; + } + + /** + * Convert StorageResourceDTO to StorageResourceEntity (JPA) + */ + public StorageResourceEntity storageResourceDTOToEntity(StorageResourceDTO dto) { + if (dto == null) { + return null; + } + + StorageResourceEntity entity = new StorageResourceEntity(); + + // Core fields + entity.setStorageResourceId(dto.getStorageResourceId()); + entity.setHostName(dto.getHostName()); + entity.setEnabled(dto.isEnabled()); + + // Encode UI-specific fields into JSON within description + 
entity.setStorageResourceDescription(encodeStorageUIFieldsIntoDescription(dto)); + + return entity; + } + + /** + * Convert ComputeResourceEntity (JPA) to ComputeResourceDTO + */ + public ComputeResourceDTO computeEntityToDTO(ComputeResourceEntity entity) { + if (entity == null) { + return null; + } + + ComputeResourceDTO dto = new ComputeResourceDTO(); + + // Core fields + dto.setComputeResourceId(entity.getComputeResourceId()); + dto.setHostName(entity.getHostName()); + dto.setResourceDescription(entity.getResourceDescription()); + // Handle enabled field safely - Short type from database + Short enabledValue = entity.getEnabled(); + dto.setEnabled(enabledValue != null && enabledValue.shortValue() == 1); + dto.setCpuCores(entity.getCpusPerNode()); + dto.setMemoryGB(entity.getMaxMemoryPerNode()); + + // Extract name from UI fields or generate fallback + String extractedName = extractNameFromDescription(entity.getResourceDescription()); + if (extractedName != null && !extractedName.trim().isEmpty()) { + dto.setName(extractedName); + } else { + // Generate name from hostname and description + dto.setName(generateComputeResourceName(entity.getHostName(), entity.getResourceDescription())); + } + + // Timestamps + if (entity.getCreationTime() != null) { + dto.setCreationTime(entity.getCreationTime().getTime()); + } + if (entity.getUpdateTime() != null) { + dto.setUpdateTime(entity.getUpdateTime().getTime()); + } + + // Extract UI-specific fields from JSON stored in description + extractComputeUIFieldsFromDescription(entity.getResourceDescription(), dto); + + // Initialize empty arrays for fields not stored in database + if (dto.getHostAliases() == null) { + dto.setHostAliases(new ArrayList<>()); + } + if (dto.getIpAddresses() == null) { + dto.setIpAddresses(new ArrayList<>()); + } + if (dto.getQueues() == null) { + dto.setQueues(new ArrayList<>()); + } + + return dto; + } + + /** + * Convert ComputeResourceDTO to ComputeResourceEntity (JPA) + */ + public 
ComputeResourceEntity computeResourceDTOToEntity(ComputeResourceDTO dto) { + if (dto == null) { + return null; + } + + ComputeResourceEntity entity = new ComputeResourceEntity(); + + // Core fields + entity.setComputeResourceId(dto.getComputeResourceId()); + entity.setHostName(dto.getHostName()); + entity.setEnabled(dto.isEnabled() ? Short.valueOf((short) 1) : Short.valueOf((short) 0)); + entity.setCpusPerNode(dto.getCpuCores()); + entity.setMaxMemoryPerNode(dto.getMemoryGB()); + + // Encode UI-specific fields into JSON within description + entity.setResourceDescription(encodeComputeUIFieldsIntoDescription(dto)); + + return entity; + } + + // Helper method to extract storage UI fields from JSON in description + private void extractStorageUIFieldsFromDescription(String description, StorageResourceDTO dto) { + if (description == null || !description.contains("UI_FIELDS:")) { + return; + } + + try { + // Extract JSON part after UI_FIELDS: + String jsonPart = description.substring(description.indexOf("UI_FIELDS:") + 10).trim(); + JsonNode rootNode = objectMapper.readTree(jsonPart); + + // Extract UI-specific fields + dto.setStorageType(getStringValue(rootNode, STORAGE_TYPE_KEY)); + dto.setCapacityTB(getLongValue(rootNode, CAPACITY_TB_KEY)); + dto.setAccessProtocol(getStringValue(rootNode, ACCESS_PROTOCOL_KEY)); + dto.setSupportsEncryption(getBooleanValue(rootNode, SUPPORTS_ENCRYPTION_KEY)); + dto.setSupportsVersioning(getBooleanValue(rootNode, SUPPORTS_VERSIONING_KEY)); + + // Extract preserved fields + dto.setName(getStringValue(rootNode, NAME_KEY)); + + // S3-specific fields + dto.setBucketName(getStringValue(rootNode, BUCKET_NAME_KEY)); + dto.setAccessKey(getStringValue(rootNode, ACCESS_KEY_KEY)); + dto.setSecretKey(getStringValue(rootNode, SECRET_KEY_KEY)); + + // SCP-specific fields + dto.setPort(getIntegerValue(rootNode, PORT_KEY)); + dto.setUsername(getStringValue(rootNode, USERNAME_KEY)); + dto.setAuthenticationMethod(getStringValue(rootNode, AUTH_METHOD_KEY)); 
+ dto.setRemotePath(getStringValue(rootNode, REMOTE_PATH_KEY)); + + // Clean description (remove UI_FIELDS part) + String cleanDescription = description.substring(0, description.indexOf("UI_FIELDS:")).trim(); + if (cleanDescription.endsWith("\n\n")) { + cleanDescription = cleanDescription.substring(0, cleanDescription.length() - 2); + } + dto.setStorageResourceDescription(cleanDescription); + + } catch (Exception e) { + LOGGER.warn("Failed to extract storage UI fields from description", e); + } + } + + // Helper method to extract compute UI fields from JSON in description + private void extractComputeUIFieldsFromDescription(String description, ComputeResourceDTO dto) { + if (description == null || !description.contains("UI_FIELDS:")) { + return; + } + + try { + // Extract JSON part after UI_FIELDS: + String jsonPart = description.substring(description.indexOf("UI_FIELDS:") + 10).trim(); + JsonNode rootNode = objectMapper.readTree(jsonPart); + + // Extract UI-specific fields + dto.setComputeType(getStringValue(rootNode, COMPUTE_TYPE_KEY)); + dto.setOperatingSystem(getStringValue(rootNode, OPERATING_SYSTEM_KEY)); + dto.setResourceJobManagerType(getStringValue(rootNode, SCHEDULER_TYPE_KEY)); // Map old schedulerType to new field + dto.setDataMovementProtocol(getStringValue(rootNode, DATA_MOVEMENT_PROTOCOL_KEY)); + dto.setQueueSystem(getStringValue(rootNode, QUEUE_SYSTEM_KEY)); + dto.setResourceManager(getStringValue(rootNode, RESOURCE_MANAGER_KEY)); + + // Extract SSH fields (updated field names) + dto.setSshPort(getIntegerValue(rootNode, SSH_PORT_KEY)); + dto.setSecurityProtocol(getStringValue(rootNode, AUTH_METHOD_KEY)); // Map authenticationMethod to securityProtocol + dto.setAlternativeSSHHostName(getStringValue(rootNode, SSH_USERNAME_KEY)); // Repurpose for alternative hostname + + // Extract preserved fields + dto.setName(getStringValue(rootNode, NAME_KEY)); + + // Extract arrays + JsonNode hostAliasesNode = rootNode.get(HOST_ALIASES_KEY); + if (hostAliasesNode 
!= null && hostAliasesNode.isArray()) { + List hostAliases = new ArrayList<>(); + hostAliasesNode.forEach(node -> hostAliases.add(node.asText())); + dto.setHostAliases(hostAliases); + } + + JsonNode ipAddressesNode = rootNode.get(IP_ADDRESSES_KEY); + if (ipAddressesNode != null && ipAddressesNode.isArray()) { + List ipAddresses = new ArrayList<>(); + ipAddressesNode.forEach(node -> ipAddresses.add(node.asText())); + dto.setIpAddresses(ipAddresses); + } + + JsonNode queuesNode = rootNode.get(QUEUES_KEY); + if (queuesNode != null && queuesNode.isArray()) { + List queues = new ArrayList<>(); + queuesNode.forEach(queueNode -> { + ComputeResourceQueueDTO queue = new ComputeResourceQueueDTO(); + queue.setQueueName(getStringValue(queueNode, "queueName")); + queue.setMaxNodes(getIntegerValue(queueNode, "maxNodes")); + queue.setMaxProcessors(getIntegerValue(queueNode, "maxProcessors")); + queue.setMaxRunTime(getIntegerValue(queueNode, "maxRunTime")); + queues.add(queue); + }); + dto.setQueues(queues); + } + + // Clean description (remove UI_FIELDS part) + String cleanDescription = description.substring(0, description.indexOf("UI_FIELDS:")).trim(); + if (cleanDescription.endsWith("\n\n")) { + cleanDescription = cleanDescription.substring(0, cleanDescription.length() - 2); + } + dto.setResourceDescription(cleanDescription); + + } catch (Exception e) { + LOGGER.warn("Failed to extract compute UI fields from description", e); + } + } + + // Helper method to encode storage UI fields into description + private String encodeStorageUIFieldsIntoDescription(StorageResourceDTO dto) { + StringBuilder description = new StringBuilder(); + + // Add base description + if (dto.getStorageResourceDescription() != null) { + description.append(dto.getStorageResourceDescription()); + } + + // Add UI fields as JSON + try { + Map uiFields = new HashMap<>(); + uiFields.put(STORAGE_TYPE_KEY, dto.getStorageType()); + uiFields.put(CAPACITY_TB_KEY, dto.getCapacityTB()); + 
uiFields.put(ACCESS_PROTOCOL_KEY, dto.getAccessProtocol()); + uiFields.put(SUPPORTS_ENCRYPTION_KEY, dto.getSupportsEncryption()); + uiFields.put(SUPPORTS_VERSIONING_KEY, dto.getSupportsVersioning()); + + // Preserve critical fields + uiFields.put(NAME_KEY, dto.getName()); + + // S3-specific fields + if (dto.getBucketName() != null) { + uiFields.put(BUCKET_NAME_KEY, dto.getBucketName()); + } + if (dto.getAccessKey() != null) { + uiFields.put(ACCESS_KEY_KEY, dto.getAccessKey()); + } + if (dto.getSecretKey() != null) { + uiFields.put(SECRET_KEY_KEY, dto.getSecretKey()); + } + + // SCP-specific fields + if (dto.getPort() != null) { + uiFields.put(PORT_KEY, dto.getPort()); + } + if (dto.getUsername() != null) { + uiFields.put(USERNAME_KEY, dto.getUsername()); + } + if (dto.getAuthenticationMethod() != null) { + uiFields.put(AUTH_METHOD_KEY, dto.getAuthenticationMethod()); + } + if (dto.getRemotePath() != null) { + uiFields.put(REMOTE_PATH_KEY, dto.getRemotePath()); + } + + String uiFieldsJson = objectMapper.writeValueAsString(uiFields); + String result = description.toString() + "\n\nUI_FIELDS: " + uiFieldsJson; + + // Check if result exceeds database column limit + if (result.length() > 2000) { + LOGGER.warn("JSON serialization length ({}) may exceed database column limit. 
Consider running the database migration script.", result.length()); + } + + return result; + + } catch (Exception e) { + LOGGER.warn("Failed to encode storage UI fields", e); + return description.toString(); + } + } + + // Helper method to encode compute UI fields into description + private String encodeComputeUIFieldsIntoDescription(ComputeResourceDTO dto) { + StringBuilder description = new StringBuilder(); + + // Add base description + if (dto.getResourceDescription() != null) { + description.append(dto.getResourceDescription()); + } + + // Add UI fields as JSON + try { + Map uiFields = new HashMap<>(); + uiFields.put(COMPUTE_TYPE_KEY, dto.getComputeType()); + uiFields.put(OPERATING_SYSTEM_KEY, dto.getOperatingSystem()); + uiFields.put(SCHEDULER_TYPE_KEY, dto.getResourceJobManagerType()); // Use new field name + uiFields.put(DATA_MOVEMENT_PROTOCOL_KEY, dto.getDataMovementProtocol()); + uiFields.put(QUEUE_SYSTEM_KEY, dto.getQueueSystem()); + uiFields.put(RESOURCE_MANAGER_KEY, dto.getResourceManager()); + + // SSH configuration fields (updated field names) + uiFields.put(SSH_USERNAME_KEY, dto.getAlternativeSSHHostName()); // Repurposed field + uiFields.put(SSH_PORT_KEY, dto.getSshPort()); + uiFields.put(AUTH_METHOD_KEY, dto.getSecurityProtocol()); // Use new field name + + // Preserve critical fields that might be lost + uiFields.put(NAME_KEY, dto.getName()); + uiFields.put(HOST_ALIASES_KEY, dto.getHostAliases()); + uiFields.put(IP_ADDRESSES_KEY, dto.getIpAddresses()); + uiFields.put(QUEUES_KEY, dto.getQueues()); + + String uiFieldsJson = objectMapper.writeValueAsString(uiFields); + String result = description.toString() + "\n\nUI_FIELDS: " + uiFieldsJson; + + // Check if result exceeds database column limit (assume 255 for safety if not migrated) + if (result.length() > 2000) { + LOGGER.warn("JSON serialization length ({}) may exceed database column limit. 
Consider running the database migration script.", result.length()); + // Could implement compression here if needed, but for now just log the warning + } + + return result; + + } catch (Exception e) { + LOGGER.warn("Failed to encode compute UI fields", e); + return description.toString(); + } + } + + /** + * Generate a human-readable name for storage resource from hostname and description + */ + private String generateStorageResourceName(String hostName, String description) { + if (description != null && description.length() > 10) { + // Try to extract first line/sentence as name + String firstLine = description.split("\n")[0].trim(); + if (firstLine.length() > 5 && firstLine.length() < 100) { + return firstLine; + } + } + + // Fallback to hostname-based name + if (hostName != null && !hostName.trim().isEmpty()) { + String name = hostName.replace(".edu", "") + .replace(".org", "") + .replace(".com", "") + .replace("-", " ") + .replace(".", " "); + + // Capitalize words for better display + String[] words = name.split("\\s+"); + StringBuilder result = new StringBuilder(); + for (String word : words) { + if (word.length() > 0) { + if (result.length() > 0) result.append(" "); + result.append(word.substring(0, 1).toUpperCase()) + .append(word.substring(1).toLowerCase()); + } + } + return result.toString(); + } + + // Ultimate fallback if hostname is also null/empty + return "Unnamed Storage Resource"; + } + + /** + * Generate a human-readable name for compute resource from hostname and description + */ + private String generateComputeResourceName(String hostName, String description) { + if (description != null && description.length() > 10) { + // Try to extract first line/sentence as name + String firstLine = description.split("\n")[0].trim(); + if (firstLine.length() > 5 && firstLine.length() < 100) { + return firstLine; + } + } + + // Fallback to hostname-based name + if (hostName != null && !hostName.trim().isEmpty()) { + String name = hostName.replace(".edu", "") + 
.replace(".org", "") + .replace(".com", "") + .replace("-", " ") + .replace(".", " "); + + // Capitalize words for better display + String[] words = name.split("\\s+"); + StringBuilder result = new StringBuilder(); + for (String word : words) { + if (word.length() > 0) { + if (result.length() > 0) result.append(" "); + result.append(word.substring(0, 1).toUpperCase()) + .append(word.substring(1).toLowerCase()); + } + } + return result.toString(); + } + + // Ultimate fallback if hostname is also null/empty + return "Unnamed Compute Resource"; + } + + /** + * Extract name from description UI fields + */ + private String extractNameFromDescription(String description) { + if (description == null || !description.contains("UI_FIELDS:")) { + return null; + } + + try { + String jsonPart = description.substring(description.indexOf("UI_FIELDS:") + 10).trim(); + JsonNode rootNode = objectMapper.readTree(jsonPart); + return getStringValue(rootNode, NAME_KEY); + } catch (Exception e) { + LOGGER.warn("Failed to extract name from description", e); + return null; + } + } + + /** + * Extract name from storage description UI fields + */ + private String extractNameFromStorageDescription(String description) { + if (description == null || !description.contains("UI_FIELDS:")) { + return null; + } + + try { + String jsonPart = description.substring(description.indexOf("UI_FIELDS:") + 10).trim(); + JsonNode rootNode = objectMapper.readTree(jsonPart); + return getStringValue(rootNode, NAME_KEY); + } catch (Exception e) { + LOGGER.warn("Failed to extract name from storage description", e); + return null; + } + } + + /** + * Generate a unique interface ID for JobSubmissionInterface or DataMovementInterface + */ + private String generateInterfaceId(String prefix) { + return prefix + "_" + UUID.randomUUID().toString().replace("-", "").substring(0, 8); + } +} \ No newline at end of file diff --git 
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.research.service.v2.controller;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.airavata.research.service.dto.ComputeResourceDTO;
import org.apache.airavata.research.service.handler.ComputeResourceHandler;
import org.apache.airavata.research.service.service.UserContextService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * V2 REST API for managing compute infrastructure resources.
 * Delegates all persistence work to {@link ComputeResourceHandler}; this class
 * only handles HTTP concerns (validation, status codes, logging).
 */
@RestController
@RequestMapping("/api/v2/rf/compute-resources")
@Tag(name = "Compute Resources V2", description = "V2 API for managing compute infrastructure resources")
public class ComputeResourceController {

    private static final Logger LOGGER = LoggerFactory.getLogger(ComputeResourceController.class);

    @Autowired
    private ComputeResourceHandler computeResourceHandler;

    @Autowired
    private UserContextService userContextService;

    /**
     * List all compute resources, optionally filtered by a name search term.
     */
    @Operation(summary = "Get all compute resources")
    @GetMapping("/")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<List<ComputeResourceDTO>> getComputeResources(
            @RequestParam(value = "nameSearch", required = false) String nameSearch) {

        LOGGER.info("Getting compute resources - search: {}", nameSearch);

        try {
            List<ComputeResourceDTO> resources;
            if (nameSearch != null && !nameSearch.trim().isEmpty()) {
                resources = computeResourceHandler.searchComputeResources(nameSearch);
            } else {
                resources = computeResourceHandler.getAllComputeResources();
            }

            LOGGER.info("Found {} compute resources", resources.size());
            return ResponseEntity.ok(resources);
        } catch (Exception e) {
            // Log the full stack trace, not just the message
            LOGGER.error("Failed to get compute resources", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Fetch a single compute resource; 404 when the handler reports "not found".
     */
    @Operation(summary = "Get compute resource by ID")
    @GetMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<ComputeResourceDTO> getComputeResourceById(@PathVariable("id") String id) {
        LOGGER.info("Getting compute resource by ID: {}", id);

        try {
            ComputeResourceDTO resource = computeResourceHandler.getComputeResource(id);
            return ResponseEntity.ok(resource);
        } catch (RuntimeException e) {
            // getMessage() may be null; guard before contains() to avoid an NPE
            // masking the real failure as a 500-with-NPE.
            String message = e.getMessage();
            if (message != null && message.contains("not found")) {
                LOGGER.warn("Compute resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
            LOGGER.error("Error getting compute resource {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Create a new compute resource. Returns 400 with joined field errors when
     * bean validation fails, 201 with the saved DTO on success.
     */
    @Operation(summary = "Create new compute resource")
    @PostMapping("/")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<?> createComputeResource(
            @Valid @RequestBody ComputeResourceDTO computeResourceDTO, BindingResult bindingResult) {
        LOGGER.info("Creating new compute resource: {}", computeResourceDTO.getHostName());

        if (bindingResult.hasErrors()) {
            String errorMessage = formatValidationErrors(bindingResult);
            LOGGER.error("Validation errors: {}", errorMessage);
            return ResponseEntity.badRequest().body("Validation failed: " + errorMessage);
        }

        // TODO: Remove setDefaultValues() as part of migration - rely on DTO validation instead

        try {
            // Set creator from authenticated user
            String currentUser = userContextService.getCurrentUserId();
            // Note: ComputeResourceDTO would need a createdBy field to store this

            ComputeResourceDTO savedResource = computeResourceHandler.createComputeResource(computeResourceDTO);
            LOGGER.info("Created compute resource with ID: {} by user: {}",
                    savedResource.getComputeResourceId(), currentUser);

            return ResponseEntity.status(HttpStatus.CREATED).body(savedResource);
        } catch (Exception e) {
            LOGGER.error("Error creating compute resource: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error creating compute resource: " + e.getMessage());
        }
    }

    /**
     * Update an existing compute resource by ID.
     */
    @Operation(summary = "Update compute resource")
    @PutMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<?> updateComputeResource(
            @PathVariable("id") String id,
            @Valid @RequestBody ComputeResourceDTO computeResourceDTO,
            BindingResult bindingResult) {
        LOGGER.info("Updating compute resource with ID: {}", id);

        if (bindingResult.hasErrors()) {
            String errorMessage = formatValidationErrors(bindingResult);
            LOGGER.error("Validation errors: {}", errorMessage);
            return ResponseEntity.badRequest().body("Validation failed: " + errorMessage);
        }

        // TODO: Remove setDefaultValues() as part of migration - rely on DTO validation instead

        try {
            ComputeResourceDTO updatedResource = computeResourceHandler.updateComputeResource(id, computeResourceDTO);
            LOGGER.info("Successfully updated compute resource with ID: {}", id);
            return ResponseEntity.ok(updatedResource);
        } catch (Exception e) {
            LOGGER.error("Error updating compute resource with ID: {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error updating compute resource: " + e.getMessage());
        }
    }

    /**
     * Delete a compute resource by ID.
     */
    @Operation(summary = "Delete compute resource")
    @DeleteMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<String> deleteComputeResource(@PathVariable("id") String id) {
        LOGGER.info("Deleting compute resource with ID: {}", id);

        try {
            computeResourceHandler.deleteComputeResource(id);
            LOGGER.info("Successfully deleted compute resource with ID: {}", id);
            return ResponseEntity.ok().body("Compute resource deleted successfully");
        } catch (Exception e) {
            LOGGER.error("Error deleting compute resource with ID: {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error deleting compute resource: " + e.getMessage());
        }
    }

    /**
     * Keyword search across compute resources.
     * NOTE(review): unlike the CRUD endpoints above, this has no @PreAuthorize
     * guard — confirm whether anonymous search is intentional.
     */
    @Operation(summary = "Search compute resources by keyword")
    @GetMapping("/search")
    public ResponseEntity<List<ComputeResourceDTO>> searchComputeResources(
            @RequestParam(value = "keyword") String keyword) {

        LOGGER.info("Searching compute resources with keyword: {}", keyword);

        try {
            List<ComputeResourceDTO> resources = computeResourceHandler.searchComputeResources(keyword);
            LOGGER.info("Found {} compute resources matching keyword: {}", resources.size(), keyword);
            return ResponseEntity.ok(resources);
        } catch (Exception e) {
            LOGGER.error("Error searching compute resources", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Toggle a star on a compute resource for the current user.
     * Currently a simplified stub: always responds true when the resource exists.
     */
    @Operation(summary = "Star/unstar a compute resource")
    @PostMapping("/{id}/star")
    @PreAuthorize("hasRole('USER')")
    public ResponseEntity<Boolean> starComputeResource(@PathVariable("id") String id) {
        LOGGER.info("Toggling star for compute resource with ID: {}", id);

        try {
            String userId = userContextService.getCurrentUserId();
            if (computeResourceHandler.existsComputeResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                // For now, return simple toggle response
                LOGGER.info("Star toggle requested for compute resource: {} by user: {} (simplified implementation)",
                        id, userId);
                return ResponseEntity.ok(true);
            } else {
                LOGGER.warn("Compute resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error toggling compute resource star: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Check whether the current user starred a compute resource.
     * Currently a simplified stub: always responds false when the resource exists.
     */
    @Operation(summary = "Check if user starred a compute resource")
    @GetMapping("/{id}/star")
    public ResponseEntity<Boolean> checkComputeResourceStarred(@PathVariable("id") String id) {
        LOGGER.info("Checking if compute resource is starred: {}", id);

        try {
            if (computeResourceHandler.existsComputeResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                LOGGER.info("Star status check for compute resource: {} (simplified implementation)", id);
                return ResponseEntity.ok(false);
            } else {
                LOGGER.warn("Compute resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error checking compute resource star status: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Star count for a compute resource. Currently a stub returning 0.
     */
    @Operation(summary = "Get compute resource star count")
    @GetMapping("/{id}/stars/count")
    public ResponseEntity<Integer> getComputeResourceStarCount(@PathVariable("id") String id) {
        LOGGER.info("Getting star count for compute resource: {}", id);

        try {
            if (computeResourceHandler.existsComputeResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                return ResponseEntity.ok(0);
            } else {
                LOGGER.warn("Compute resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error getting star count: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * List all compute resources starred by the current user.
     * Currently a stub returning an empty list.
     */
    @Operation(summary = "Get all starred compute resources")
    @GetMapping("/starred")
    public ResponseEntity<List<ComputeResourceDTO>> getStarredComputeResources() {
        LOGGER.info("Fetching starred compute resources");

        try {
            // TODO: Implement proper v1 ResourceStar system integration
            // For now, return empty list
            List<ComputeResourceDTO> starredResources = List.of();
            LOGGER.info("Found {} starred compute resources", starredResources.size());
            return ResponseEntity.ok(starredResources);
        } catch (Exception e) {
            LOGGER.error("Error fetching starred compute resources: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Join all bean-validation field errors into "field: message, field: message".
     * Shared by create and update (previously duplicated reduce pipelines).
     */
    private static String formatValidationErrors(BindingResult bindingResult) {
        String joined = bindingResult.getFieldErrors().stream()
                .map(error -> error.getField() + ": " + error.getDefaultMessage())
                .collect(Collectors.joining(", "));
        return joined.isEmpty() ? "Validation failed" : joined;
    }

    /**
     * Set intelligent defaults for backend-only fields not provided by UI.
     * UI provides core fields (hostName, name, description) - backend fills in
     * infrastructure defaults.
     *
     * NOTE(review): currently unreferenced (see TODOs above) — kept pending the
     * planned migration to DTO-level validation.
     */
    private void setDefaultValues(ComputeResourceDTO dto) {
        // Backend fills infrastructure defaults - UI provides name and resourceDescription

        if (dto.getComputeType() == null || dto.getComputeType().trim().isEmpty()) {
            dto.setComputeType("HPC");
        }
        if (dto.getCpuCores() == null) {
            dto.setCpuCores(1);
        }
        if (dto.getMemoryGB() == null) {
            dto.setMemoryGB(1);
        }
        if (dto.getOperatingSystem() == null || dto.getOperatingSystem().trim().isEmpty()) {
            dto.setOperatingSystem("Linux");
        }
        if (dto.getQueueSystem() == null || dto.getQueueSystem().trim().isEmpty()) {
            dto.setQueueSystem("SLURM");
        }
        if (dto.getResourceManager() == null || dto.getResourceManager().trim().isEmpty()) {
            dto.setResourceManager("Default Resource Manager");
        }

        // Default SSH configuration (using alternative hostname field)
        if (dto.getAlternativeSSHHostName() == null || dto.getAlternativeSSHHostName().trim().isEmpty()) {
            dto.setAlternativeSSHHostName(dto.getHostName()); // Default to main hostname
        }
        if (dto.getSshPort() == null) {
            dto.setSshPort(22);
        }
        if (dto.getSecurityProtocol() == null || dto.getSecurityProtocol().trim().isEmpty()) {
            dto.setSecurityProtocol("SSH_KEYS");
        }

        // Working directory is no longer a direct field - handled by related entities

        if (dto.getResourceJobManagerType() == null || dto.getResourceJobManagerType().trim().isEmpty()) {
            dto.setResourceJobManagerType("SLURM");
        }
        if (dto.getDataMovementProtocol() == null || dto.getDataMovementProtocol().trim().isEmpty()) {
            dto.setDataMovementProtocol("SCP");
        }

        LOGGER.debug("Set default values for compute resource: name={}, type={}, cores={}",
                dto.getName(), dto.getComputeType(), dto.getCpuCores());
    }
}
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.research.service.v2.controller;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.airavata.research.service.dto.StorageResourceDTO;
import org.apache.airavata.research.service.handler.StorageResourceHandler;
import org.apache.airavata.research.service.service.UserContextService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * V2 REST API for managing storage infrastructure resources.
 * Mirrors ComputeResourceController; all persistence work is delegated to
 * {@link StorageResourceHandler}.
 */
@RestController
@RequestMapping("/api/v2/rf/storage-resources")
@Tag(name = "Storage Resources V2", description = "V2 API for managing storage infrastructure resources")
public class StorageResourceController {

    private static final Logger LOGGER = LoggerFactory.getLogger(StorageResourceController.class);

    @Autowired
    private StorageResourceHandler storageResourceHandler;

    @Autowired
    private UserContextService userContextService;

    /**
     * List all storage resources, optionally filtered by a name search term.
     */
    @Operation(summary = "Get all storage resources")
    @GetMapping("/")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<List<StorageResourceDTO>> getStorageResources(
            @RequestParam(value = "nameSearch", required = false) String nameSearch) {

        LOGGER.info("Getting storage resources - search: {}", nameSearch);

        try {
            List<StorageResourceDTO> resources;
            if (nameSearch != null && !nameSearch.trim().isEmpty()) {
                resources = storageResourceHandler.searchStorageResources(nameSearch);
            } else {
                resources = storageResourceHandler.getAllStorageResources();
            }

            LOGGER.info("Found {} storage resources", resources.size());
            return ResponseEntity.ok(resources);
        } catch (Exception e) {
            // Log the full stack trace, not just the message
            LOGGER.error("Failed to get storage resources", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Fetch a single storage resource; 404 when the handler reports "not found".
     */
    @Operation(summary = "Get storage resource by ID")
    @GetMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<StorageResourceDTO> getStorageResourceById(@PathVariable("id") String id) {
        LOGGER.info("Getting storage resource by ID: {}", id);

        try {
            StorageResourceDTO resource = storageResourceHandler.getStorageResource(id);
            return ResponseEntity.ok(resource);
        } catch (RuntimeException e) {
            // getMessage() may be null; guard before contains() to avoid an NPE
            String message = e.getMessage();
            if (message != null && message.contains("not found")) {
                LOGGER.warn("Storage resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
            LOGGER.error("Error getting storage resource {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Create a new storage resource. Returns 400 with joined field errors when
     * bean validation fails, 201 with the saved DTO on success.
     */
    @Operation(summary = "Create new storage resource")
    @PostMapping("/")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<?> createStorageResource(
            @Valid @RequestBody StorageResourceDTO storageResourceDTO, BindingResult bindingResult) {
        LOGGER.info("Creating new storage resource: {}", storageResourceDTO.getHostName());

        if (bindingResult.hasErrors()) {
            String errorMessage = formatValidationErrors(bindingResult);
            LOGGER.error("Validation errors: {}", errorMessage);
            return ResponseEntity.badRequest().body("Validation failed: " + errorMessage);
        }

        // Set intelligent defaults for fields not provided by UI
        // TODO: Remove setDefaultValues() as part of migration - rely on DTO validation instead

        try {
            StorageResourceDTO savedResource = storageResourceHandler.createStorageResource(storageResourceDTO);
            LOGGER.info("Created storage resource with ID: {}", savedResource.getStorageResourceId());

            return ResponseEntity.status(HttpStatus.CREATED).body(savedResource);
        } catch (Exception e) {
            LOGGER.error("Error creating storage resource: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error creating storage resource: " + e.getMessage());
        }
    }

    /**
     * Update an existing storage resource by ID.
     * Guarded with @PreAuthorize for parity with ComputeResourceController's
     * update endpoint (it was previously unprotected here — likely an oversight).
     */
    @Operation(summary = "Update storage resource")
    @PutMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<?> updateStorageResource(
            @PathVariable("id") String id,
            @Valid @RequestBody StorageResourceDTO storageResourceDTO,
            BindingResult bindingResult) {
        LOGGER.info("Updating storage resource with ID: {}", id);

        if (bindingResult.hasErrors()) {
            String errorMessage = formatValidationErrors(bindingResult);
            LOGGER.error("Validation errors: {}", errorMessage);
            return ResponseEntity.badRequest().body("Validation failed: " + errorMessage);
        }

        // Set intelligent defaults for fields not provided by UI
        // TODO: Remove setDefaultValues() as part of migration - rely on DTO validation instead

        try {
            StorageResourceDTO updatedResource = storageResourceHandler.updateStorageResource(id, storageResourceDTO);
            LOGGER.info("Successfully updated storage resource with ID: {}", id);
            return ResponseEntity.ok(updatedResource);
        } catch (Exception e) {
            LOGGER.error("Error updating storage resource with ID: {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error updating storage resource: " + e.getMessage());
        }
    }

    /**
     * Delete a storage resource by ID.
     * Guarded with @PreAuthorize for parity with ComputeResourceController's
     * delete endpoint (an unprotected DELETE is a security hole).
     */
    @Operation(summary = "Delete storage resource")
    @DeleteMapping("/{id}")
    @PreAuthorize("hasRole('USER') or hasRole('API_USER')")
    public ResponseEntity<String> deleteStorageResource(@PathVariable("id") String id) {
        LOGGER.info("Deleting storage resource with ID: {}", id);

        try {
            storageResourceHandler.deleteStorageResource(id);
            LOGGER.info("Successfully deleted storage resource with ID: {}", id);
            return ResponseEntity.ok().body("Storage resource deleted successfully");
        } catch (Exception e) {
            LOGGER.error("Error deleting storage resource with ID: {}", id, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                    .body("Error deleting storage resource: " + e.getMessage());
        }
    }

    /**
     * Keyword search across storage resources.
     * NOTE(review): no @PreAuthorize guard, matching the compute controller's
     * search endpoint — confirm whether anonymous search is intentional.
     */
    @Operation(summary = "Search storage resources by keyword")
    @GetMapping("/search")
    public ResponseEntity<List<StorageResourceDTO>> searchStorageResources(
            @RequestParam(value = "keyword") String keyword) {

        LOGGER.info("Searching storage resources with keyword: {}", keyword);

        try {
            List<StorageResourceDTO> resources = storageResourceHandler.searchStorageResources(keyword);
            LOGGER.info("Found {} storage resources matching keyword: {}", resources.size(), keyword);
            return ResponseEntity.ok(resources);
        } catch (Exception e) {
            LOGGER.error("Error searching storage resources", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * List storage resources of a given type (e.g. S3, SCP).
     */
    @Operation(summary = "Get storage resources by type")
    @GetMapping("/type/{storageType}")
    public ResponseEntity<List<StorageResourceDTO>> getStorageResourcesByType(
            @PathVariable("storageType") String storageType) {

        LOGGER.info("Getting storage resources by type: {}", storageType);

        try {
            List<StorageResourceDTO> resources = storageResourceHandler.getStorageResourcesByType(storageType);
            LOGGER.info("Found {} storage resources of type: {}", resources.size(), storageType);
            return ResponseEntity.ok(resources);
        } catch (Exception e) {
            LOGGER.error("Error filtering storage resources by type", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Toggle a star on a storage resource.
     * Guarded with hasRole('USER') for parity with the compute controller's
     * star endpoint. Currently a simplified stub: responds true when the
     * resource exists.
     */
    @Operation(summary = "Star/unstar a storage resource")
    @PostMapping("/{id}/star")
    @PreAuthorize("hasRole('USER')")
    public ResponseEntity<Boolean> starStorageResource(@PathVariable("id") String id) {
        LOGGER.info("Toggling star for storage resource with ID: {}", id);

        try {
            if (storageResourceHandler.existsStorageResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                // For now, return simple toggle response
                LOGGER.info("Star toggle requested for storage resource: {} (simplified implementation)", id);
                return ResponseEntity.ok(true);
            } else {
                LOGGER.warn("Storage resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error toggling storage resource star: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Check whether the current user starred a storage resource.
     * Currently a simplified stub: always responds false when the resource exists.
     */
    @Operation(summary = "Check if user starred a storage resource")
    @GetMapping("/{id}/star")
    public ResponseEntity<Boolean> checkStorageResourceStarred(@PathVariable("id") String id) {
        LOGGER.info("Checking if storage resource is starred: {}", id);

        try {
            if (storageResourceHandler.existsStorageResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                LOGGER.info("Star status check for storage resource: {} (simplified implementation)", id);
                return ResponseEntity.ok(false);
            } else {
                LOGGER.warn("Storage resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error checking storage resource star status: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Star count for a storage resource. Currently a stub returning 0.
     */
    @Operation(summary = "Get storage resource star count")
    @GetMapping("/{id}/stars/count")
    public ResponseEntity<Integer> getStorageResourceStarCount(@PathVariable("id") String id) {
        LOGGER.info("Getting star count for storage resource: {}", id);

        try {
            if (storageResourceHandler.existsStorageResource(id)) {
                // TODO: Implement proper v1 ResourceStar system integration
                return ResponseEntity.ok(0);
            } else {
                LOGGER.warn("Storage resource not found with ID: {}", id);
                return ResponseEntity.notFound().build();
            }
        } catch (Exception e) {
            LOGGER.error("Error getting star count: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * List all storage resources starred by the current user.
     * Currently a stub returning an empty list.
     */
    @Operation(summary = "Get all starred storage resources")
    @GetMapping("/starred")
    public ResponseEntity<List<StorageResourceDTO>> getStarredStorageResources() {
        LOGGER.info("Fetching starred storage resources");

        try {
            // TODO: Implement proper v1 ResourceStar system integration
            // For now, return empty list
            List<StorageResourceDTO> starredResources = List.of();
            LOGGER.info("Found {} starred storage resources", starredResources.size());
            return ResponseEntity.ok(starredResources);
        } catch (Exception e) {
            LOGGER.error("Error fetching starred storage resources: {}", e.getMessage(), e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Join all bean-validation field errors into "field: message, field: message".
     * Shared by create and update (previously duplicated reduce pipelines).
     */
    private static String formatValidationErrors(BindingResult bindingResult) {
        String joined = bindingResult.getFieldErrors().stream()
                .map(error -> error.getField() + ": " + error.getDefaultMessage())
                .collect(Collectors.joining(", "));
        return joined.isEmpty() ? "Validation failed" : joined;
    }

    /**
     * Set intelligent defaults for backend-only fields not provided by UI.
     * UI provides core fields (hostName, name, description) - backend fills in
     * infrastructure defaults.
     *
     * NOTE(review): currently unreferenced (see TODOs above) — kept pending the
     * planned migration to DTO-level validation.
     */
    private void setDefaultValues(StorageResourceDTO dto) {
        // Backend fills infrastructure defaults - UI provides name

        if (dto.getStorageType() == null || dto.getStorageType().trim().isEmpty()) {
            dto.setStorageType("SCP");
        }
        if (dto.getCapacityTB() == null) {
            dto.setCapacityTB(1L);
        }

        // Default access protocol depends on the storage type
        if (dto.getAccessProtocol() == null || dto.getAccessProtocol().trim().isEmpty()) {
            if ("S3".equalsIgnoreCase(dto.getStorageType())) {
                dto.setAccessProtocol("HTTPS");
            } else {
                dto.setAccessProtocol("SCP");
            }
        }

        // Default endpoint falls back to the hostname, then "localhost"
        if (dto.getEndpoint() == null || dto.getEndpoint().trim().isEmpty()) {
            String hostname = dto.getHostName();
            dto.setEndpoint(hostname != null ? hostname : "localhost");
        }

        LOGGER.debug("Set default values for storage resource: name={}, type={}, capacity={}TB",
                dto.getName(), dto.getStorageType(), dto.getCapacityTB());
    }
}
airavata.host port: 8962 + registry: + host: localhost + port: 9930 + enabled: false # Disabled for development - using direct database access spring: servlet: @@ -44,17 +56,26 @@ spring: max-file-size: 200MB max-request-size: 200MB datasource: - url: "jdbc:mariadb://airavata.host:13306/research_catalog" - username: "airavata" - password: "123456" - driver-class-name: org.mariadb.jdbc.Driver + url: "jdbc:h2:mem:testdb" + username: "sa" + password: "" + driver-class-name: org.h2.Driver hikari: pool-name: ResearchCatalogPool leak-detection-threshold: 20000 jpa: hibernate: - ddl-auto: update + ddl-auto: none # Don't modify existing app_catalog schema + properties: + hibernate: + dialect: org.hibernate.dialect.MariaDBDialect + format_sql: true open-in-view: false + show-sql: true + h2: + console: + enabled: true + path: /h2-console springdoc: api-docs: @@ -68,3 +89,20 @@ springdoc: use-pkce-with-authorization-code-grant: true client-id: data-catalog-portal +# Authentication Configuration +research: + auth: + jwks-uri: "https://auth.dev.cybershuttle.org/realms/default/protocol/openid-connect/certs" + issuer-uri: "https://auth.dev.cybershuttle.org/realms/default" + dev-api-key: "dev-research-api-key-12345" + cors: + allowed-origins: "http://localhost:5173,http://localhost:3000" + allowed-methods: "GET,POST,PUT,DELETE,OPTIONS" + allowed-headers: "*" + +# Logging for security debugging (dev profile) +logging: + level: + org.springframework.security: DEBUG + org.springframework.security.oauth2: DEBUG +