diff --git a/docs/Job Cleanup and Improvement.postman_collection.json b/docs/Job Cleanup and Improvement.postman_collection.json new file mode 100644 index 0000000..48c47af --- /dev/null +++ b/docs/Job Cleanup and Improvement.postman_collection.json @@ -0,0 +1,109 @@ +{ + "info": { + "_postman_id": "d1083801-f6dd-4220-a6c6-55ddab035754", + "name": "Job Cleanup and Improvement", + "schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json" + }, + "item": [ + { + "name": "View challenges in challenges listing index", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/challengeslisting/challenges/30005520" + }, + "response": [] + }, + { + "name": "View challenges in challenges details index", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/challengesdetail/challenges/30005520" + }, + "response": [] + }, + { + "name": "View marachon match in challenges listing index", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/challengeslisting/challenges/13675" + }, + "response": [] + }, + { + "name": "View marathon match in challenges details index", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/challengeslisting/challenges/13675" + }, + "response": [] + }, + { + "name": "View marathon match", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/mmatches/mmatches/_search" + }, + "response": [] + }, + { + "name": "View single round match", + "request": { + "method": "GET", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": "http://cockpit.cloud.topcoder.com:9200/srms/srms/_search" + }, + "response": [] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "d61f2275-4803-4bac-9787-1bfd6c2540e8", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "40db868c-a50a-4b47-a1cd-73f959da9145", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ] +} \ No newline at end of file diff --git a/docs/Verification_Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement version 1.0.docx b/docs/Verification_Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement version 1.0.docx new file mode 100644 index 0000000..07f401d Binary files /dev/null and b/docs/Verification_Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement version 1.0.docx differ diff --git a/local/run.sh b/local/run.sh index ab932ce..967c46f 100644 --- a/local/run.sh +++ b/local/run.sh @@ -6,6 +6,5 @@ export TC_JWT_KEY="secret" # export AWS_SECRET_KEY=your secret export REDISSON_JOB_CLUSTER_ENABLED=false -export REDISSON_JOB_FORCE_INITIAL_LOAD=true java -jar -Duser.timezone=America/New_York ../target/elasticsearch-feeder-service-*.jar server ../src/main/resources/elasticsearch-feeder-service.yaml diff --git a/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceApplication.java b/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceApplication.java index f3546cf..58f7632 100644 --- a/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceApplication.java +++ 
b/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceApplication.java @@ -8,8 +8,8 @@ import com.appirio.service.challengefeeder.resources.HealthCheckResource; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.service.resourcefactory.ChallengeFeederFactory; -import com.appirio.service.resourcefactory.MmFeederResourceFactory; import com.appirio.service.resourcefactory.MarathonMatchFeederFactory; +import com.appirio.service.resourcefactory.MmFeederResourceFactory; import com.appirio.service.resourcefactory.SRMFeederFactory; import com.appirio.service.supply.resources.SupplyDatasourceFactory; @@ -70,8 +70,13 @@ *
  * - Add job for LoadChangedChallengeDetailJob
  *
  *
+ *
+ * Version 1.8 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0
+ * - remove the useless resources, jobs and logging of configuration values
+ *
    * @author TCSCODER - * @version 1.7 + * @version 1.8 */ public class ChallengeFeederServiceApplication extends BaseApplication { /** @@ -107,19 +112,47 @@ protected void logServiceSpecificConfigs(ChallengeFeederServiceConfiguration con logger.info("\t\tAWS region : " + config.getJestClientConfiguration().getAwsRegion()); logger.info("\t\tAWS service : " + config.getJestClientConfiguration().getAwsService()); - logger.info("\tRedissonConfiguration "); - logger.info("\t\tChallenges index: " + config.getRedissonConfiguration().getChallengesIndex()); - logger.info("\t\tChallenges type: " + config.getRedissonConfiguration().getChallengesType()); - logger.info("\t\tSingle server address: " + config.getRedissonConfiguration().getSingleServerAddress()); - logger.info("\t\tLast run timestamp prefix for job LoadChangedChallengesJob: " + config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix()); - logger.info("\t\tLast run timestamp prefix for job MarathonMatchesJob: " + config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix()); - logger.info("\t\tLast run timestamp prefix for job SingleRoundMatchesJob: " + config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix()); - logger.info("\t\tCluster enabled: " + config.getRedissonConfiguration().isClusterEnabled()); - logger.info("\t\tLoadChangedChallengesJob Locker key name: " + config.getRedissonConfiguration().getLoadChangedChallengesJobLockerKeyName()); - logger.info("\t\tMarathonMatchesJob Locker key name: " + config.getRedissonConfiguration().getMarathonMatchesJobLockerKeyName()); - logger.info("\t\tSingleRoundMatchesJob Locker key name: " + config.getRedissonConfiguration().getSingleRoundMatchesJobLockerKeyName()); - logger.info("\t\tLock watchdog timeout: " + config.getRedissonConfiguration().getLockWatchdogTimeout()); - logger.info("\t\tNode addresses: " + config.getRedissonConfiguration().getNodeAddresses()); + logger.info("\tJobsConfiguration"); + logger.info("\t\tRedissonConfiguration"); + logger.info("\t\t\tSingle server address: " + config.getJobsConfiguration().getRedissonConfiguration().getSingleServerAddress()); + logger.info("\t\t\tCluster enabled: " + config.getJobsConfiguration().getRedissonConfiguration().isClusterEnabled()); + logger.info("\t\t\tLock watchdog timeout: " + config.getJobsConfiguration().getRedissonConfiguration().getLockWatchdogTimeout()); + logger.info("\t\t\tNode addresses: " + config.getJobsConfiguration().getRedissonConfiguration().getNodeAddresses()); + + + logger.info("\tJobs configuration"); + logger.info("\t\t\tRedisson configuration"); + logger.info("\t\t\t\t\tLock watchdog timeout : " + config.getJobsConfiguration().getRedissonConfiguration().getLockWatchdogTimeout()); + logger.info("\t\t\t\t\tSingle server address : " + config.getJobsConfiguration().getRedissonConfiguration().getSingleServerAddress()); + logger.info("\t\t\t\t\tCluster enabled : " + config.getJobsConfiguration().getRedissonConfiguration().isClusterEnabled()); + logger.info("\t\t\tNode addresses: " + config.getJobsConfiguration().getRedissonConfiguration().getNodeAddresses()); + logger.info("\t\t\t\t\tNode addresses"); + logger.info("\t\t\tLoad changed challenges listing job"); + logger.info("\t\t\t\t\tIndex name"); + logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getLoadChangedChallengesListingJob().getBatchUpdateSize()); + logger.info("\t\t\tLoad changed challenges detail job"); + logger.info("\t\t\t\t\tIndex name"); + 
logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getLoadChangedChallengesDetailJob().getBatchUpdateSize()); + logger.info("\t\t\tLegacymm to challenge listing job"); + logger.info("\t\t\t\t\tIndex name"); + logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getBatchUpdateSize()); + logger.info("\t\t\t\t\tMarathon matches days to subtract : " + config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getMarathonMatchesDaysToSubtract()); + logger.info("\t\t\t\t\tMarathon matches forum url : " + config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getMarathonMatchesForumUrl()); + logger.info("\t\t\tLoad changedmm challenge detail job"); + logger.info("\t\t\t\t\tIndex name"); + logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getBatchUpdateSize()); + logger.info("\t\t\t\t\tMarathon matches days to subtract : " + config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getMarathonMatchesDaysToSubtract()); + logger.info("\t\t\t\t\tMarathon matches forum url : " + config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getMarathonMatchesForumUrl()); + logger.info("\t\t\tMarathon matches job"); + logger.info("\t\t\t\t\tIndex name"); + logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getMarathonMatchesJob().getBatchUpdateSize()); + logger.info("\t\t\t\t\tMarathon matches days to subtract : " + config.getJobsConfiguration().getMarathonMatchesJob().getMarathonMatchesDaysToSubtract()); + logger.info("\t\t\t\t\tMarathon matches forum url : " + config.getJobsConfiguration().getMarathonMatchesJob().getMarathonMatchesForumUrl()); + logger.info("\t\t\tSingle round matches job"); + logger.info("\t\t\t\t\tIndex name"); + logger.info("\t\t\t\t\tBatch size : " + config.getJobsConfiguration().getSingleRoundMatchesJob().getBatchUpdateSize()); + logger.info("\t\t\t\t\tSingle round matches days to subtract : " + config.getJobsConfiguration().getSingleRoundMatchesJob().getSingleRoundMatchesDaysToSubtract()); + logger.info("\tJobs "); logger.info("\t\tJobs: " + config.getJobs()); @@ -149,9 +182,9 @@ protected void registerResources(ChallengeFeederServiceConfiguration config, Env JestClient jestClient = JestClientUtils.get(config.getJestClientConfiguration()); // Register resources here + env.jersey().register(new HealthCheckResource()); env.jersey().register(new ChallengeFeederFactory(jestClient).getResourceInstance()); env.jersey().register(new MmFeederResourceFactory(jestClient).getResourceInstance()); - env.jersey().register(new HealthCheckResource()); env.jersey().register(new MarathonMatchFeederFactory(jestClient).getResourceInstance()); env.jersey().register(new SRMFeederFactory(jestClient).getResourceInstance()); logger.info("Services registered"); @@ -182,7 +215,7 @@ public void initialize(Bootstrap bootstrap) bootstrap.setConfigurationSourceProvider( new SubstitutingSourceProvider(bootstrap.getConfigurationSourceProvider(), new EnvironmentVariableSubstitutor(false))); - bootstrap.addBundle(new JobsBundle(new StartupJob(), new LoadChangedChallengesJob(), new LoadChangedChallengesListingJob(), new LegacyMMToChallengeListingJob(), - new MarathonMatchesJob(), new SingleRoundMatchesJob(), new LegacyMMToChallengeJob(), new LoadChangedChallengesDetailJob(), new LoadChangedMMChallengeDetailJob())); + bootstrap.addBundle(new JobsBundle(new LoadChangedChallengesListingJob(), new LegacyMMToChallengeListingJob(), new 
MarathonMatchesJob(), + new SingleRoundMatchesJob(), new LoadChangedChallengesDetailJob(), new LoadChangedMMChallengeDetailJob())); } } diff --git a/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceConfiguration.java index c19d32a..1c1255b 100644 --- a/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceConfiguration.java +++ b/src/main/java/com/appirio/service/challengefeeder/ChallengeFeederServiceConfiguration.java @@ -11,11 +11,12 @@ import com.appirio.service.challengefeeder.config.ChallengeConfiguration; import com.appirio.service.challengefeeder.config.CommonConfiguration; import com.appirio.service.challengefeeder.config.JestClientConfiguration; -import com.appirio.service.challengefeeder.config.RedissonConfiguration; +import com.appirio.service.challengefeeder.config.JobsConfiguration; import com.appirio.service.supply.resources.SupplyDatasourceFactory; import com.fasterxml.jackson.annotation.JsonProperty; import de.spinscale.dropwizard.jobs.JobConfiguration; +import lombok.Getter; import javax.validation.Valid; import javax.validation.constraints.NotNull; @@ -34,8 +35,13 @@ * * Version 1.3 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index * - add commonConfiguration + * + * Version 1.4 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor the job configuration + * + * * @author TCSCODER - * @version 1.3 + * @version 1.4 */ public class ChallengeFeederServiceConfiguration extends BaseAppConfiguration implements JobConfiguration { @@ -62,10 +68,11 @@ public class ChallengeFeederServiceConfiguration extends BaseAppConfiguration im private Map jobs; /** - * Represents the redissonConfiguration field + * Represents the jobsConfiguration field */ - @JsonProperty("redissonConfiguration") - private RedissonConfiguration redissonConfiguration; + @JsonProperty("jobsConfiguration") + @Getter + private JobsConfiguration jobsConfiguration; /** * The challengeConfiguration @@ -110,14 +117,6 @@ public Map getJobs() { return jobs; } - /** - * Get redissonConfiguration - * @return the redissonConfiguration - */ - public RedissonConfiguration getRedissonConfiguration() { - return this.redissonConfiguration; - } - /** * Get challenge configuration * diff --git a/src/main/java/com/appirio/service/challengefeeder/config/BaseJobConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/BaseJobConfiguration.java new file mode 100644 index 0000000..52c9044 --- /dev/null +++ b/src/main/java/com/appirio/service/challengefeeder/config/BaseJobConfiguration.java @@ -0,0 +1,46 @@ +package com.appirio.service.challengefeeder.config; + +import org.hibernate.validator.constraints.NotEmpty; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Getter; +import lombok.Setter; + +/** + * Represents the BaseJobConfiguration + * + * It's added in Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * + * @author TCCoder + * @version 1.0 + * + */ +public class BaseJobConfiguration { + /** + * The index field + */ + @JsonProperty + @NotEmpty + @Getter + @Setter + private String indexName; + + /** + * The type name field + */ + @JsonProperty + @NotEmpty + @Getter + @Setter + private String typeName; + + /** + * Represents the batch update size field. 
+ */ + @JsonProperty + @Getter + @Setter + private int batchUpdateSize; + +} diff --git a/src/main/java/com/appirio/service/challengefeeder/config/JobConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/JobConfiguration.java new file mode 100644 index 0000000..e01dc31 --- /dev/null +++ b/src/main/java/com/appirio/service/challengefeeder/config/JobConfiguration.java @@ -0,0 +1,36 @@ +package com.appirio.service.challengefeeder.config; + +import org.hibernate.validator.constraints.NotEmpty; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Getter; +import lombok.Setter; + +/** + * Represents the JobConfiguration + * + * It's added in Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * + * @author TCCoder + * @version 1.0 + * + */ +public class JobConfiguration { + /** + * The index field + */ + @JsonProperty + @NotEmpty + @Getter + @Setter + private String indexName; + + /** + * Represents the batch update size field. + */ + @JsonProperty + @Getter + @Setter + private int batchUpdateSize; +} diff --git a/src/main/java/com/appirio/service/challengefeeder/config/JobsConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/JobsConfiguration.java new file mode 100644 index 0000000..824a80b --- /dev/null +++ b/src/main/java/com/appirio/service/challengefeeder/config/JobsConfiguration.java @@ -0,0 +1,66 @@ +package com.appirio.service.challengefeeder.config; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Getter; + +/** + * The JobsConfiguration is for the jobs configuration + * + * It's added in Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * + * @author TCCoder + * @version 1.0 + * + */ +public class JobsConfiguration { + /** + * Represents the redissonConfiguration field + */ + @JsonProperty("redissonConfiguration") + @Getter + private RedissonConfiguration redissonConfiguration; + + /** + * The loadChangedChallengesListingJob field + */ + @JsonProperty("loadChangedChallengesListingJob") + @Getter + private JobConfiguration loadChangedChallengesListingJob; + + /** + * The loadChangedChallengesDetailJob field + */ + @JsonProperty("loadChangedChallengesDetailJob") + @Getter + private JobConfiguration loadChangedChallengesDetailJob; + + /** + * The legacyMMToChallengeListingJob field + */ + @JsonProperty("legacyMMToChallengeListingJob") + @Getter + private MMJobConfiguration legacyMMToChallengeListingJob; + + /** + * The loadChangedMMChallengeDetailJob field + */ + @JsonProperty("loadChangedMMChallengeDetailJob") + @Getter + private MMJobConfiguration loadChangedMMChallengeDetailJob; + + /** + * The marathonMatchesJob field + */ + @JsonProperty("marathonMatchesJob") + @Getter + private MMJobConfiguration marathonMatchesJob; + + /** + * The singleRoundMatchesJob field + */ + @JsonProperty("singleRoundMatchesJob") + @Getter + private SRMJobConfiguration singleRoundMatchesJob; + +} diff --git a/src/main/java/com/appirio/service/challengefeeder/config/MMJobConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/MMJobConfiguration.java new file mode 100644 index 0000000..43af84f --- /dev/null +++ b/src/main/java/com/appirio/service/challengefeeder/config/MMJobConfiguration.java @@ -0,0 +1,36 @@ +package com.appirio.service.challengefeeder.config; + +import org.hibernate.validator.constraints.NotEmpty; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Getter; +import lombok.Setter; + +/** + * Represents the 
MMJobConfiguration + * + * It's added in Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * + * @author TCCoder + * @version 1.0 + * + */ +public class MMJobConfiguration extends JobConfiguration { + /** + * The days to subtract in MarathonMatchesJob. + */ + @JsonProperty + @Getter + @Setter + private int marathonMatchesDaysToSubtract; + + /** + * Represents the marathonMatchesForumUrl. + */ + @JsonProperty + @NotEmpty + @Getter + @Setter + private String marathonMatchesForumUrl; +} diff --git a/src/main/java/com/appirio/service/challengefeeder/config/RedissonConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/RedissonConfiguration.java index f1fe505..0fa655a 100644 --- a/src/main/java/com/appirio/service/challengefeeder/config/RedissonConfiguration.java +++ b/src/main/java/com/appirio/service/challengefeeder/config/RedissonConfiguration.java @@ -5,8 +5,6 @@ import java.util.List; -import org.hibernate.validator.constraints.NotEmpty; - import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; @@ -52,200 +50,17 @@ *
  * - Added {@link #loadChangedChallengesDetailJobLockerKeyName}
  *
  *
+ *
+ * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0
+ * - keep only redisson connection configurations
+ *
+ *
    * @author TCCoder - * @version 1.5 + * @version 2.0 */ public class RedissonConfiguration { - - /** - * The forceInitialLoad field - */ - @JsonProperty - @Getter - @Setter - private boolean forceInitialLoad; - - /** - * The challenge index field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesIndex; - - /** - * The challenge index listing field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesListingIndex; - - /** - * The marathon match index field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String mmIndex; - - /** - * The challenge detail index field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesDetailIndex; - - /** - * The single round match index field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String srmsIndex; - - /** - * The challenges type field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesType; - - /** - * The challenges listing type field - * @since 1.2 - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesDetailType; - - /** - * The marahon match type field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String mmType; - - /** - * The single round match type field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String srmsType; - - /** - * The challenge type listing field - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String challengesListingType; - - /** - * Represents the batch update size attribute. - */ - @JsonProperty - @Getter - @Setter - private int batchUpdateSize; - - /** - * Represents the last run timestamp prefix attribute for job LoadChangedChallengesListingJob. - * @since 1.2 - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesDetailJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for job LoadChangedChallengesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for job LoadChangedChallengesListingJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesListingJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for job MarathonMatchesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String marathonMatchesJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for LegacyMMToChallengeJob - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String legacyMMJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for job SingleRoundMatchesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String singleRoundMatchesJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix for LegacyMMToChallengeListJob - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String legacyMMToChallengeListingJobLastRunTimestampPrefix; - - /** - * Represents the last run timestamp prefix attribute for load changed mm challenge detail job . - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedMMChallengeDetailJobLastRunTimestampPrefix; - - /** - * Represents the locker key name attribute for load changed mm challenge detail job. 
- */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedMMChallengeDetailJobLockerKeyName; - /** * The cluster enabled field */ @@ -254,71 +69,6 @@ public class RedissonConfiguration { @Setter private boolean clusterEnabled; - /** - * Represents the locker key name attribute for job LoadChangedChallengesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesJobLockerKeyName; - - /** - * Represents the locker key name attribute for job LoadChangedChallengesListingJob. - * @since 1.2 - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesDetailJobLockerKeyName; - - - /** - * Represents the locker key name attribute for job LoadChangedChallengesListingJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String loadChangedChallengesListingJobLockerKeyName; - - /** - * Represents the locker key name attribute for job MarathonMatchesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String marathonMatchesJobLockerKeyName; - - /** - * Represents the locker key name attribute for LegacyMMToChallengeJob - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String legacyMMJobLockerKeyName; - - /** - * Represents the locker key name attribute for LegacyMMToChallengeListJob - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String legacyMMToChallengeListingJobLockerKeyName; - - /** - * Represents the locker key name attribute for job SingleRoundMatchesJob. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String singleRoundMatchesJobLockerKeyName; - /** * Represents the lock watchdog timeout attribute. */ @@ -343,29 +93,4 @@ public class RedissonConfiguration { @Setter private String singleServerAddress; - /** - * The days to subtract in MarathonMatchesJob. - */ - @JsonProperty - @Getter - @Setter - private int marathonMatchesDaysToSubtract; - - /** - * The days to subtract in SingleRoundMatchesJob. - */ - @JsonProperty - @Getter - @Setter - private int singleRoundMatchesDaysToSubtract; - - /** - * Represents the marathonMatchesForumUrl. - */ - @JsonProperty - @NotEmpty - @Getter - @Setter - private String marathonMatchesForumUrl; - } \ No newline at end of file diff --git a/src/main/java/com/appirio/service/challengefeeder/config/SRMJobConfiguration.java b/src/main/java/com/appirio/service/challengefeeder/config/SRMJobConfiguration.java new file mode 100644 index 0000000..7da4647 --- /dev/null +++ b/src/main/java/com/appirio/service/challengefeeder/config/SRMJobConfiguration.java @@ -0,0 +1,26 @@ +package com.appirio.service.challengefeeder.config; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Getter; +import lombok.Setter; + +/** + * Represents the SRMJobConfiguration + * + * It's added in Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * + * @author TCCoder + * @version 1.0 + * + */ +public class SRMJobConfiguration extends JobConfiguration { + + /** + * The days to subtract in SingleRoundMatchesJob. 
+ */ + @JsonProperty + @Getter + @Setter + private int singleRoundMatchesDaysToSubtract; +} diff --git a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeDetailMMFeederDAO.java b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeDetailMMFeederDAO.java index fcaf7d0..05a928b 100644 --- a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeDetailMMFeederDAO.java +++ b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeDetailMMFeederDAO.java @@ -3,14 +3,19 @@ */ package com.appirio.service.challengefeeder.dao; +import java.sql.Date; import java.util.List; +import org.skife.jdbi.v2.sqlobject.Bind; + import com.appirio.service.challengefeeder.api.challengedetail.ChallengeDetailData; import com.appirio.service.challengefeeder.api.challengedetail.RegistrantData; import com.appirio.service.challengefeeder.api.challengedetail.SubmissionData; +import com.appirio.service.challengefeeder.dto.DatabaseTimestamp; import com.appirio.supply.dataaccess.ApiQueryInput; import com.appirio.supply.dataaccess.DatasourceName; import com.appirio.supply.dataaccess.SqlQueryFile; +import com.appirio.tech.core.api.v3.TCID; import com.appirio.tech.core.api.v3.request.QueryParameter; /** @@ -18,8 +23,11 @@ * * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Detail Index For Legacy Marathon Matches v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - add more methods for marathon match + * * @author TCCODER - * @version 1.1 + * @version 1.1 */ @DatasourceName("oltp") public interface ChallengeDetailMMFeederDAO { @@ -50,4 +58,21 @@ public interface ChallengeDetailMMFeederDAO { */ @SqlQueryFile("sql/mm-feeder-into-challenges/get_submissions_for_challenge_detail.sql") List getSubmissionsForChallengeDetail(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get timestamp + * + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/job/get_timestamp.sql") + DatabaseTimestamp getTimestamp(); + + /** + * Get the marathon matches whose registration phase started after the specified date and after the last run timestamp. + * @param date + * @param lastRunTimestamp + * @return + */ + @SqlQueryFile("sql/mmatches-feeder/job/get_mm_registration_phase_started.sql") + List getMatchesWithRegistrationPhaseStartedIds(@Bind("initialDate") Date date, @Bind("lastRunTimestamp") Long lastRunTimestamp); } diff --git a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeFeederDAO.java b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeFeederDAO.java deleted file mode 100644 index 9f9c4c3..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeFeederDAO.java +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (C) 2017 TopCoder Inc., All Rights Reserved. 
- */ -package com.appirio.service.challengefeeder.dao; - - -import com.appirio.service.challengefeeder.api.*; -import com.appirio.service.challengefeeder.dto.DatabaseTimestamp; -import com.appirio.supply.dataaccess.ApiQueryInput; -import com.appirio.supply.dataaccess.DatasourceName; -import com.appirio.supply.dataaccess.SqlQueryFile; -import com.appirio.tech.core.api.v3.TCID; -import com.appirio.tech.core.api.v3.request.QueryParameter; - -import java.util.Date; -import java.util.List; -import java.util.Map; - -import org.skife.jdbi.v2.sqlobject.Bind; - -/** - * DAO to interact with challenge data - * - * Version 1.1 - Topcoder - Create CronJob For Populating Changed Challenges To Elasticsearch v1.0 - * - add the methods to get the changed challenge ids and current timestamp - * - * Version 1.2 - Topcoder - Elasticsearch Service - Populate Challenge Points Prize In Challenges Index - * - add the methods to get the prize points for the challenge ids - * - * @author TCCODER - * @version 1.2 - */ -@DatasourceName("oltp") -public interface ChallengeFeederDAO { - /** - * Get challenges - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/challenge-feeder/get_challenges.sql") - List getChallenges(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get phases - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/challenge-feeder/get_phases.sql") - List getPhases(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get resources - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/challenge-feeder/get_resources.sql") - List getResources(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get events - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/challenge-feeder/get_events.sql") - List getEvents(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get user ids - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_user_ids.sql") - List> getUserIds(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get group ids - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_group_ids.sql") - List> getGroupIds(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get prizes - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_prizes.sql") - List getPrizes(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get checkpoint prizes - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_checkpoint_prizes.sql") - List getCheckpointPrizes(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get winners - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_winners.sql") - List getWinners(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get submissions - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_submissions.sql") - List getSubmissions(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get properties - * - * @param queryParameter the queryParameter to use - * @return the result - */ - 
@SqlQueryFile("sql/challenge-feeder/get_properties.sql") - List getProperties(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get reviews - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_reviews.sql") - List getReviews(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get terms - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_terms.sql") - List getTerms(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get file types - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/get_file_types.sql") - List getFileTypes(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get changed challenge ids - * - * @param lastRunTimestamp the lastRunTimestamp to use - * @return the List result - */ - @SqlQueryFile("sql/challenge-feeder/job/get_changed_challenge_ids.sql") - List getChangedChallengeIds(@Bind("lastRunTimestamp") Date lastRunTimestamp); - - /** - * Get timestamp - * - * @param queryParameter the queryParameter to use - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/job/get_timestamp.sql") - DatabaseTimestamp getTimestamp(); - - /** - * Get contest id, component id, and number of submissions and registrants for mm challenge - * - * @param queryParameter - * @return - */ - @SqlQueryFile("sql/challenge-feeder/get_mm_contest.sql") - List> getMMContestComponent(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get legacy MM resources - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_resources.sql") - List getMMResources(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get prize of type prize points - * - * @param challengeId challengeId - * @return the list of prizes - */ - @SqlQueryFile("sql/challenge-feeder/get_challenge_pointsPrize.sql") - List getPointsPrize(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get challenge technologies - * - * @param queryParameter the queryParameter to use - * @return the List> result - */ - @SqlQueryFile("sql/challenge-feeder/get_challenge_technologies.sql") - List> getChallengeTechnologies(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get challenge plagforms - * - * @param queryParameter the queryParameter to use - * @return the List> result - */ - @SqlQueryFile("sql/challenge-feeder/get_challenge_plagforms.sql") - List> getChallengePlagforms(@ApiQueryInput QueryParameter queryParameter); -} diff --git a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingFeederDAO.java b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingFeederDAO.java index d18e0b4..3447b36 100644 --- a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingFeederDAO.java +++ b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingFeederDAO.java @@ -3,16 +3,24 @@ */ package com.appirio.service.challengefeeder.dao; +import java.util.Date; import java.util.List; import java.util.Map; +import org.skife.jdbi.v2.sqlobject.Bind; + +import com.appirio.service.challengefeeder.api.FileTypeData; +import com.appirio.service.challengefeeder.api.PhaseData; +import com.appirio.service.challengefeeder.api.PrizeData; import com.appirio.service.challengefeeder.api.UserIdData; import 
com.appirio.service.challengefeeder.api.challengelisting.ChallengeListingData; import com.appirio.service.challengefeeder.api.challengelisting.EventData; import com.appirio.service.challengefeeder.api.challengelisting.WinnerData; +import com.appirio.service.challengefeeder.dto.DatabaseTimestamp; import com.appirio.supply.dataaccess.ApiQueryInput; import com.appirio.supply.dataaccess.DatasourceName; import com.appirio.supply.dataaccess.SqlQueryFile; +import com.appirio.tech.core.api.v3.TCID; import com.appirio.tech.core.api.v3.request.QueryParameter; /** @@ -20,8 +28,12 @@ * * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - add more methods for the listing data + * + * * @author TCCoder - * @version 1.0 + * @version 1.1 * */ @DatasourceName("oltp") @@ -70,4 +82,85 @@ public interface ChallengeListingFeederDAO { */ @SqlQueryFile("sql/challenge-feeder/get_challenge-userids.sql") List getChallengeUserIds(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get challenge technologies + * + * @param queryParameter the queryParameter to use + * @return the List> result + */ + @SqlQueryFile("sql/challenge-feeder/get_challenge_technologies.sql") + List> getChallengeTechnologies(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get challenge platforms + * + * @param queryParameter the queryParameter to use + * @return the List> result + */ + @SqlQueryFile("sql/challenge-feeder/get_challenge_plagforms.sql") + List> getChallengePlatforms(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get prize of type prize points + * + * @param challengeId challengeId + * @return the list of prizes + */ + @SqlQueryFile("sql/challenge-feeder/get_challenge_pointsPrize.sql") + List getPointsPrize(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get group ids + * + * @param queryParameter the queryParameter to use + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/get_group_ids.sql") + List> getGroupIds(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get phases + * + * @param queryParameter the queryParameter to use + * @return the List result + */ + @SqlQueryFile("sql/challenge-feeder/get_phases.sql") + List getPhases(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get prizes + * + * @param queryParameter the queryParameter to use + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/get_prizes.sql") + List getPrizes(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get file types + * + * @param queryParameter the queryParameter to use + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/get_file_types.sql") + List getFileTypes(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get changed challenge ids + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @return the List result + */ + @SqlQueryFile("sql/challenge-feeder/job/get_changed_challenge_ids.sql") + List getChangedChallengeIds(@Bind("lastRunTimestamp") Date lastRunTimestamp); + + /** + * Get timestamp + * + * @param queryParameter the queryParameter to use + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/job/get_timestamp.sql") + DatabaseTimestamp getTimestamp(); } diff --git a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingMMFeederDAO.java b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingMMFeederDAO.java index 
4a85d89..5cfff84 100644 --- a/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingMMFeederDAO.java +++ b/src/main/java/com/appirio/service/challengefeeder/dao/ChallengeListingMMFeederDAO.java @@ -3,22 +3,34 @@ */ package com.appirio.service.challengefeeder.dao; +import com.appirio.service.challengefeeder.api.EventData; +import com.appirio.service.challengefeeder.api.PhaseData; +import com.appirio.service.challengefeeder.api.PrizeData; import com.appirio.service.challengefeeder.api.challengelisting.ChallengeListingData; import com.appirio.service.challengefeeder.api.challengelisting.WinnerData; +import com.appirio.service.challengefeeder.dto.DatabaseTimestamp; import com.appirio.supply.dataaccess.ApiQueryInput; import com.appirio.supply.dataaccess.DatasourceName; import com.appirio.supply.dataaccess.SqlQueryFile; +import com.appirio.tech.core.api.v3.TCID; import com.appirio.tech.core.api.v3.request.QueryParameter; +import java.sql.Date; import java.util.List; +import org.skife.jdbi.v2.sqlobject.Bind; + /** * DAO to interact with marathon match data * * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index For Legacy Marathon Matches v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - add more methods for the listing marathon data + * + * * @author TCCODER - * @version 1.0 + * @version 1.1 */ @DatasourceName("oltp") public interface ChallengeListingMMFeederDAO { @@ -39,4 +51,48 @@ public interface ChallengeListingMMFeederDAO { */ @SqlQueryFile("sql/mm-feeder-into-challenges/get_marathon_match_winners.sql") List getMarathonMatchWinners(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get timestamp + * + * @return the result + */ + @SqlQueryFile("sql/challenge-feeder/job/get_timestamp.sql") + DatabaseTimestamp getTimestamp(); + + /** + * Get the marathon matches whose registration phase started after the specified date and after the last run timestamp. + * @param date + * @param lastRunTimestamp + * @return + */ + @SqlQueryFile("sql/mmatches-feeder/job/get_mm_registration_phase_started.sql") + List getMatchesWithRegistrationPhaseStartedIds(@Bind("initialDate") Date date, @Bind("lastRunTimestamp") Long lastRunTimestamp); + + /** + * Get phases + * + * @param queryParameter the queryParameter to use + * @return the List result + */ + @SqlQueryFile("sql/mm-feeder-into-challenges/get-phases.sql") + List getPhases(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get prizes + * + * @param queryParameter the queryParameter to use + * @return the List result + */ + @SqlQueryFile("sql/mm-feeder-into-challenges/get_prizes.sql") + List getPrizes(@ApiQueryInput QueryParameter queryParameter); + + /** + * Get events + * + * @param queryParameter the queryParameter to use + * @return the List result + */ + @SqlQueryFile("sql/mm-feeder-into-challenges/get_events.sql") + List getEvents(@ApiQueryInput QueryParameter queryParameter); } diff --git a/src/main/java/com/appirio/service/challengefeeder/dao/MmFeederDAO.java b/src/main/java/com/appirio/service/challengefeeder/dao/MmFeederDAO.java deleted file mode 100644 index ff5203c..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/dao/MmFeederDAO.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (C) 2018 TopCoder Inc., All Rights Reserved. 
- */ -package com.appirio.service.challengefeeder.dao; - -import java.sql.Date; -import java.util.*; - -import com.appirio.service.challengefeeder.api.ChallengeData; -import com.appirio.service.challengefeeder.api.EventData; -import com.appirio.service.challengefeeder.api.PhaseData; -import com.appirio.service.challengefeeder.api.PrizeData; -import com.appirio.service.challengefeeder.api.ResourceData; -import com.appirio.service.challengefeeder.api.SubmissionData; -import com.appirio.service.challengefeeder.api.TermsOfUseData; -import com.appirio.service.challengefeeder.dto.*; -import com.appirio.supply.dataaccess.ApiQueryInput; -import com.appirio.supply.dataaccess.DatasourceName; -import com.appirio.supply.dataaccess.SqlQueryFile; -import com.appirio.tech.core.api.v3.*; -import com.appirio.tech.core.api.v3.request.QueryParameter; -import org.skife.jdbi.v2.sqlobject.*; - -/** - * DAO to interact with marathon match data - * - * It's added in Topcoder - Populate Marathon Match Related Data Into Challenge Model In Elasticsearch v1.0 - * - * @author TCCODER - * @version 1.0 - */ -@DatasourceName("oltp") -public interface MmFeederDAO { - /** - * Get marathon matches - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_marathon_matches.sql") - List getMarathonMatches(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get submissions - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_submissions.sql") - List getSubmissions(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get terms - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get-terms.sql") - List getTerms(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get events - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_events.sql") - List getEvents(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get resources - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_resources.sql") - List getResources(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get phases - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get-phases.sql") - List getPhases(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get prizes - * - * @param queryParameter the queryParameter to use - * @return the List result - */ - @SqlQueryFile("sql/mm-feeder-into-challenges/get_prizes.sql") - List getPrizes(@ApiQueryInput QueryParameter queryParameter); - - /** - * Get timestamp - * - * @return the result - */ - @SqlQueryFile("sql/challenge-feeder/job/get_timestamp.sql") - DatabaseTimestamp getTimestamp(); - - /** - * Get the marathon matches whose registration phase started after the specified date and after the last run timestamp. 
- * @param date - * @param lastRunTimestamp - * @return - */ - @SqlQueryFile("sql/mmatches-feeder/job/get_mm_registration_phase_started.sql") - List getMatchesWithRegistrationPhaseStartedIds(@Bind("initialDate") Date date, @Bind("lastRunTimestamp") Long lastRunTimestamp); -} diff --git a/src/main/java/com/appirio/service/challengefeeder/job/BaseJob.java b/src/main/java/com/appirio/service/challengefeeder/job/BaseJob.java index 738cbea..caf0a5d 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/BaseJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/BaseJob.java @@ -6,55 +6,258 @@ import java.text.DateFormat; import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; import java.util.TimeZone; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.redisson.Redisson; +import org.redisson.api.RLock; +import org.redisson.api.RMap; +import org.redisson.api.RedissonClient; +import org.redisson.config.Config; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; +import com.appirio.supply.SupplyException; +import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.Job; -import lombok.NoArgsConstructor; /** * BaseJob abstract class used as base class to children job classes. * * It was added in Topcoder - Create CronJob For Populating Marathon Matches and SRMs To Elasticsearch v1.0 * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - change DATE_FORMAT to non-static variable as SimpleDateFormat is not thread safe + * - abstract out common logic for the sub classes to implement + * + * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ -@NoArgsConstructor public abstract class BaseJob extends Job { - + /** + * The JOB_ENABLE_SUFFIX field + */ + public static final String JOB_ENABLE_SUFFIX = ".job.enable"; + + /** + * The JOB_LAST_RUN_TIMESTAMP_SUFFIX field + */ + public static final String JOB_LAST_RUN_TIMESTAMP_SUFFIX = ".job.lastrun.timestamp"; + + /** + * The JOB_LOCKER_NAME_SUFFIX field + */ + public static final String JOB_LOCKER_NAME_SUFFIX = ".job.locker"; + /** * The GLOBAL_CONFIGURATION field. */ public static ChallengeFeederServiceConfiguration GLOBAL_CONFIGURATION; + /** + * Logger used to log events + */ + private static final Logger logger = LoggerFactory.getLogger(BaseJob.class); + /** * The initial timestamp constant. */ protected static final long INITIAL_TIMESTAMP = 1L; - + /** * The DATE_FORMAT field. */ - protected static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - static { - DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC")); - } + protected final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); /** * The config field. */ protected ChallengeFeederServiceConfiguration config; + + /** + * The indexName field + */ + protected String indexName; + + /** + * The typeName field + */ + protected String typeName; + + /** + * The batchSize field + */ + protected int batchSize; + + /** + * The lastRuntimestampeKey field + */ + protected String lastRuntimestampeKey; + + /** + * The lockerKey field + */ + protected String lockerKey; + + /** + * The jobEnableKey field + */ + protected String jobEnableKey; /** * BaseJob constructor. * - * @param config the config to use. 
*/ - public BaseJob(ChallengeFeederServiceConfiguration config) { - this.config = config; + public BaseJob() { + this.config = GLOBAL_CONFIGURATION; + dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + } + + /** + * Do job. This methods load the data to elastic services. + * + * @param context the context to use + * @throws JobExecutionException if any error occurs + */ + @Override + public void doJob(JobExecutionContext context) throws JobExecutionException { + RLock lock; + RedissonClient redisson = null; + try { + this.init(); + redisson = this.getRedissonClient(); + + RMap mapCache = redisson.getMap(this.getClass().getName() + ".map.cache"); + + String enable = mapCache.get(this.jobEnableKey); + logger.info("The " + this.getClass().getName() + " is enable:" + (enable == null ? "false" : enable)); + if ("true".equalsIgnoreCase(enable)) { + logger.info("Try to get the lock for " + this.getClass().getName() + " by the locker key:" + this.lockerKey); + lock = redisson.getLock(this.lockerKey); + if (lock.tryLock()) { + logger.info("Get the lock successfully for " + this.getClass().getName()); + try { + String timestamp = mapCache.get(this.lastRuntimestampeKey); + Date lastRunTimestamp = new Date(1L); + if (timestamp != null) { + lastRunTimestamp = dateFormat.parse(timestamp); + } + + logger.info("The last run timestamp for " + this.getClass().getName() + " is:" + timestamp); + + Date currentTime = this.getTimestamp(); + List ids = this.getFeederIdsToPush(lastRunTimestamp); + logger.info("The count of the ids to load for " + this.getClass().getName() + ":" + ids.size()); + logger.info("The ids to load for " + this.getClass().getName() + ":" + ids); + + int to = 0; + int from = 0; + while (to < ids.size()) { + to += (to + batchSize) > ids.size() ? (ids.size() - to) : batchSize; + List sub = ids.subList(from, to); + this.pushFeeders(sub); + from = to; + } + + logger.info("update last run timestamp for " + this.getClass().getName() + " is:" + dateFormat.format(currentTime)); + mapCache.put(this.lastRuntimestampeKey, dateFormat.format(currentTime)); + } finally { + logger.info("release the lock for " + this.getClass().getName()); + lock.unlock(); + } + } else { + logger.warn("the previous " + this.getClass().getName() + " is still running"); + } + } + + } catch (Exception exp) { + exp.printStackTrace(); + } finally { + if (redisson != null) { + redisson.shutdown(); + } + } + } + + /** + * Initialize the job. + * It will be called before the job is executed. 
+ * + * @throws SupplyException if any error occurs + */ + protected void init() throws SupplyException { + if (this.config == null) { + this.config = GLOBAL_CONFIGURATION; + } } + /** + * Get current timestamp + * + * @throws SupplyException if any error occurs + * @return the Date result + */ + abstract protected Date getTimestamp() throws SupplyException; + + /** + * Push feeders + * + * @param ids the ids to use + * @throws SupplyException if any error occurs + */ + abstract protected void pushFeeders(List ids) throws SupplyException ; + + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + abstract protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException ; + + /** + * Get redisson client + * + * @return the RedissonClient result + */ + protected RedissonClient getRedissonClient() { + Config redissonConfig = new Config(); + redissonConfig.setLockWatchdogTimeout(this.config.getJobsConfiguration().getRedissonConfiguration().getLockWatchdogTimeout()); + if (this.config.getJobsConfiguration().getRedissonConfiguration().isClusterEnabled()) { + for (String addr : this.config.getJobsConfiguration().getRedissonConfiguration().getNodeAddresses()) { + redissonConfig.useClusterServers().addNodeAddress(addr); + } + } else { + redissonConfig.useSingleServer().setAddress(this.config.getJobsConfiguration().getRedissonConfiguration().getSingleServerAddress()); + } + + return Redisson.create(redissonConfig); + } + + /** + * Convert TCID + * + * @param ids the ids to use + * @return the List result + */ + public static List convertTCID(List ids) { + if (ids == null) { + return null; + } + List result = new ArrayList(); + for (TCID id : ids) { + result.add(Long.parseLong(id.getId())); + } + + return result; + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeJob.java deleted file mode 100644 index 23cf986..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeJob.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (C) 2018 TopCoder Inc., All Rights Reserved. - */ -package com.appirio.service.challengefeeder.job; - -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.List; - -import com.appirio.service.challengefeeder.dao.*; -import com.appirio.service.challengefeeder.dto.*; -import com.appirio.service.challengefeeder.manager.*; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; -import com.appirio.service.challengefeeder.util.JestClientUtils; -import com.appirio.supply.DAOFactory; -import com.appirio.tech.core.api.v3.TCID; - -import de.spinscale.dropwizard.jobs.annotations.DelayStart; -import de.spinscale.dropwizard.jobs.annotations.Every; -import lombok.NoArgsConstructor; - -/** - * LegacyMMToChallengeJob is used to populate legacy marathon matches to elasticsearch challenge index. 
- * - * @author TCCoder - * @version 1.0 - * - */ -@DelayStart("20s") -@Every("${com.appirio.service.challengefeeder.job.LegacyMMToChallengeJob}") -@NoArgsConstructor -public class LegacyMMToChallengeJob extends BaseJob { - - /** - * The marathon match - challenge feeder manager instance. - */ - private MmFeederManager manager; - - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(LegacyMMToChallengeJob.class); - - /** - * LegacyMMToChallengeJob constructor. - * - * @param manager the MmFeederManager to use - * @param config the config to use - */ - public LegacyMMToChallengeJob(MmFeederManager manager, ChallengeFeederServiceConfiguration config) { - super(config); - this.manager = manager; - } - - /** - * Do job. Load the legacy marathon matches to elasticsearch service. - * - * @param context the job context to use - * @throws JobExecutionException if any error occurs - */ - @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock = null; - RedissonClient redisson = null; - try { - if (this.manager == null) { - this.manager = new MmFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), DAOFactory.getInstance().createDAO(MmFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for legacy marathon matches job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLegacyMMJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for legacy marathon matches job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLegacyMMJobLastRunTimestampPrefix()); - String timestamp = mapCache.get(config.getRedissonConfiguration().getLegacyMMJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(INITIAL_TIMESTAMP); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for marathon matches job is:" + lastRunTimestamp); - - Date currentTimestamp = new Date(); - Calendar calendar = Calendar.getInstance(); - calendar.setTime(currentTimestamp); - calendar.add(Calendar.DAY_OF_MONTH, this.config.getRedissonConfiguration().getMarathonMatchesDaysToSubtract()); - Date dateParam = calendar.getTime(); - - logger.info("The initial timestamp for legacy marathon matches job is:" + dateParam); - - List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - - logger.info("The count of the MM ids to load:" + ids.size()); - logger.info("The MM ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > 
ids.size() ? (ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - MmFeederParam param = new MmFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesType()); - param.setRoundIds(sub); - param.setLegacy(Boolean.TRUE); - try { - this.manager.pushMarathonMatchDataIntoChallenge(param); - } catch(Exception e) { - e.printStackTrace(); - } - from = to; - } - - // mark last execution as current timestamp - logger.info("update last run timestamp for challenges job is:" + currentTimestamp); - mapCache.put(config.getRedissonConfiguration().getLegacyMMJobLastRunTimestampPrefix(), DATE_FORMAT.format(currentTimestamp)); - } finally { - logger.info("release the lock for legacy marathon matches job"); - lock.unlock(); - } - } else { - logger.warn("the previous job for legacy marathon matches job is still running"); - } - } catch(Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } - } - } - -} diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeListingJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeListingJob.java index 6e08c96..64d9fe9 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeListingJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/LegacyMMToChallengeListingJob.java @@ -3,28 +3,16 @@ */ package com.appirio.service.challengefeeder.job; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; import com.appirio.service.challengefeeder.dao.ChallengeListingMMFeederDAO; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; import com.appirio.service.challengefeeder.dto.MmFeederParam; import com.appirio.service.challengefeeder.manager.ChallengeListingMMFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; @@ -33,9 +21,13 @@ /** * LegacyMMToChallengeListingJob is used to populate legacy marathon matches to elasticsearch challenge listing index. 
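The deleted doJob bodies in this hunk and the ones that follow all repeat the same batching loop: `to += (to + batchSize) > ids.size() ? (ids.size() - to) : batchSize` followed by `ids.subList(from, to)`. The ternary only clamps the final window to the end of the list. A standalone sketch of that arithmetic, with the generics the diff text has lost:

```java
import java.util.ArrayList;
import java.util.List;

/** Standalone illustration of the batch-window arithmetic used by the deleted doJob bodies. */
public class BatchWindowDemo {

    public static void main(String[] args) {
        List<Long> ids = new ArrayList<>();
        for (long i = 1; i <= 7; i++) {
            ids.add(i);
        }
        int batchSize = 3;

        int from = 0;
        int to = 0;
        while (to < ids.size()) {
            // Advance by a full batch, or only up to the end of the list on the last pass.
            to += (to + batchSize) > ids.size() ? (ids.size() - to) : batchSize;
            List<Long> sub = ids.subList(from, to);
            System.out.println("push batch [" + from + ", " + to + ") -> " + sub);
            from = to;
        }
        // Prints three batches: [1, 2, 3], [4, 5, 6], [7]
    }
}
```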
+ * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ @DelayStart("19s") @@ -49,120 +41,78 @@ public class LegacyMMToChallengeListingJob extends BaseJob { private ChallengeListingMMFeederManager manager; /** - * Logger used to log events + * Init the job + * + * @throws SupplyException if any error occurs */ - private static final Logger logger = LoggerFactory.getLogger(LegacyMMToChallengeListingJob.class); + @Override + protected void init() throws SupplyException { + super.init(); + if (this.manager == null) { + this.manager = new ChallengeListingMMFeederManager(JestClientUtils.get(this.config.getJestClientConfiguration()), + DAOFactory.getInstance().createDAO(ChallengeListingMMFeederDAO.class), + this.config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getMarathonMatchesForumUrl()); + } + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getIndexName(); + this.typeName = "challenges"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getBatchUpdateSize(); + } + + } /** - * LegacyMMToChallengeListingJob constructor. + * Get timestamp * - * @param manager the ChallengeListingMmFeederManager to use - * @param config the config to use + * @throws SupplyException if any error occurs + * @return the Date result */ - public LegacyMMToChallengeListingJob(ChallengeListingMMFeederManager manager, ChallengeFeederServiceConfiguration config) { - super(config); - this.manager = manager; + @Override + protected Date getTimestamp() throws SupplyException { + return this.manager.getTimestamp(); } /** - * Do job. Load the legacy marathon matches to elasticsearch service. 
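The refactored init() above derives every per-job Redis key from the configured index name plus shared BaseJob suffixes, replacing the long list of per-job getters on RedissonConfiguration (getLegacyMMToChallengeListingJobLockerKeyName() and friends) that the deleted code called. A small sketch of the naming scheme; the suffix values are placeholders, since the actual constants live in BaseJob and are not shown in this section of the diff:

```java
/**
 * Sketch of how the per-job Redis key names are derived in the refactored init()
 * methods. The suffix strings below are placeholders for the BaseJob constants.
 */
public class JobKeyNamingDemo {

    // Placeholder suffix values, assumed for illustration only.
    private static final String JOB_LAST_RUN_TIMESTAMP_SUFFIX = "_job_last_run_timestamp";
    private static final String JOB_LOCKER_NAME_SUFFIX = "_job_locker";
    private static final String JOB_ENABLE_SUFFIX = "_job_enable";

    public static void main(String[] args) {
        // Each job is configured with little more than its target index name...
        String indexName = "challengeslisting";

        // ...and every Redis artifact the job needs is derived from that name.
        String lastRunTimestampKey = indexName + JOB_LAST_RUN_TIMESTAMP_SUFFIX;
        String lockerKey = indexName + JOB_LOCKER_NAME_SUFFIX;
        String jobEnableKey = indexName + JOB_ENABLE_SUFFIX;

        System.out.println(lastRunTimestampKey); // challengeslisting_job_last_run_timestamp
        System.out.println(lockerKey);           // challengeslisting_job_locker
        System.out.println(jobEnableKey);        // challengeslisting_job_enable
    }
}
```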
+ * Push feeders * - * @param context the job context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock = null; - RedissonClient redisson = null; + protected void pushFeeders(List ids) throws SupplyException { + MmFeederParam param = new MmFeederParam(); + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setRoundIds(ids); + param.setLegacy(Boolean.TRUE); try { - if (this.manager == null) { - this.manager = new ChallengeListingMMFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), - DAOFactory.getInstance().createDAO(MmFeederDAO.class), DAOFactory.getInstance().createDAO(ChallengeListingMMFeederDAO.class), - GLOBAL_CONFIGURATION.getRedissonConfiguration().getMarathonMatchesForumUrl()); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for legacy marathon matches to challenge listing job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for legacy marathon matches to challenge listing job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLastRunTimestampPrefix()); - String timestamp = mapCache.get(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(INITIAL_TIMESTAMP); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for marathon matches to challenge listing job is:" + lastRunTimestamp); - - Date currentTimestamp = new Date(); - Calendar calendar = Calendar.getInstance(); - calendar.setTime(currentTimestamp); - calendar.add(Calendar.DAY_OF_MONTH, this.config.getRedissonConfiguration().getMarathonMatchesDaysToSubtract()); - Date dateParam = calendar.getTime(); - - logger.info("The initial timestamp for legacy marathon matches to challenge listing job is:" + dateParam); - - List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - - logger.info("The count of the MM ids to load:" + ids.size()); - logger.info("The MM ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - MmFeederParam param = new MmFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesListingIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesListingType()); - param.setRoundIds(sub); - param.setLegacy(Boolean.TRUE); - try { - this.manager.pushMarathonMatchDataIntoChallenge(param); - } catch(Exception e) { - e.printStackTrace(); - } - from = to; - } - - // mark last execution as current timestamp - logger.info("update last run timestamp for legacy marathon matches to challenge listing job is:" + currentTimestamp); - mapCache.put(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLastRunTimestampPrefix(), DATE_FORMAT.format(currentTimestamp)); - } finally { - logger.info("release the lock for legacy marathon matches to challenge listing job"); - lock.unlock(); - } - } else { - logger.warn("the previous job for legacy marathon matches to challenge listing job is still running"); - } - } catch(Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + this.manager.pushMarathonMatchDataIntoChallenge(param); + } catch(Exception e) { + e.printStackTrace(); } } + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + Date currentTimestamp = new Date(); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(currentTimestamp); + calendar.add(Calendar.DAY_OF_MONTH, this.config.getJobsConfiguration().getLegacyMMToChallengeListingJob().getMarathonMatchesDaysToSubtract()); + Date dateParam = calendar.getTime(); + + List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); + + return convertTCID(totalIds); + } + } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesDetailJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesDetailJob.java index a77d9ed..cb8e033 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesDetailJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesDetailJob.java @@ -3,156 +3,104 @@ */ package com.appirio.service.challengefeeder.job; -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; import com.appirio.service.challengefeeder.dao.ChallengeDetailFeederDAO; import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; import com.appirio.service.challengefeeder.manager.ChallengeDetailFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; import de.spinscale.dropwizard.jobs.annotations.Every; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.NoArgsConstructor; -import java.util.ArrayList; import java.util.Date; import java.util.List; +/** + 
* LoadChangedChallengesDetailJob is used to load the challenge details data into the details index + * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * + * + * @author TCCoder + * @version 2.0 + * + */ @DelayStart("16s") @Every("${com.appirio.service.challengefeeder.job.LoadChangedChallengesDetailJob}") +@NoArgsConstructor public class LoadChangedChallengesDetailJob extends BaseJob { - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(LoadChangedChallengesDetailJob.class); - /** * The challengeDetailFeederManager field */ private ChallengeDetailFeederManager challengeDetailFeederManager; - - /** - * Create LoadChangedChallengesDetailJob + * Init job * - * @param challengeDetailFeederManager the challengeDetailFeederManager to use - * @param config the config to use + * @throws SupplyException if any error occurs */ - public LoadChangedChallengesDetailJob(ChallengeDetailFeederManager challengeDetailFeederManager, ChallengeFeederServiceConfiguration config) { - this.challengeDetailFeederManager = challengeDetailFeederManager; - this.config = config; + @Override + protected void init() throws SupplyException { + super.init(); + if (this.challengeDetailFeederManager == null) { + this.challengeDetailFeederManager = new ChallengeDetailFeederManager(JestClientUtils.get( + this.config.getJestClientConfiguration()), + DAOFactory.getInstance().createDAO(ChallengeDetailFeederDAO.class)); + } + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getLoadChangedChallengesDetailJob().getIndexName(); + this.typeName = "challenges"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getLoadChangedChallengesDetailJob().getBatchUpdateSize(); + } } /** - * Create LoadChangedChallengesDetailJob + * Get timestamp * + * @throws SupplyException if any error occurs + * @return the Date result */ - public LoadChangedChallengesDetailJob() { + @Override + protected Date getTimestamp() throws SupplyException { + return this.challengeDetailFeederManager.getTimestamp(); } /** - * Do job. This methods load the challenges detail to elastic services. 
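Each deleted doJob body also kept a last-run marker in a Redisson RMapCache, parsed it with the shared DATE_FORMAT, and wrote a fresh value back after pushing; after this refactor that bookkeeping presumably lives once in BaseJob around getFeederIdsToPush()/pushFeeders(). A standalone sketch of the pattern, with a placeholder Redis address, key and date format (the deleted code uses the same string as both the map name and the key inside it, which the sketch mirrors):

```java
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.redisson.Redisson;
import org.redisson.api.RMapCache;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

/** Standalone sketch of the last-run-timestamp bookkeeping from the deleted doJob bodies. */
public class LastRunTimestampDemo {

    // Placeholder pattern; BaseJob's DATE_FORMAT is not shown in this section of the diff.
    private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    public static void main(String[] args) throws Exception {
        Config config = new Config();
        config.useSingleServer().setAddress("redis://127.0.0.1:6379"); // placeholder address
        RedissonClient redisson = Redisson.create(config);
        try {
            String key = "challengesdetail_job_last_run_timestamp"; // placeholder key
            RMapCache<String, String> mapCache = redisson.getMapCache(key);

            // Read the previous run's marker; fall back to a near-epoch default on the first run.
            String stored = mapCache.get(key);
            Date lastRunTimestamp = stored == null ? new Date(1L) : DATE_FORMAT.parse(stored);
            System.out.println("loading everything changed since " + lastRunTimestamp);

            // ... getFeederIdsToPush(lastRunTimestamp) and pushFeeders(ids) would run here ...

            // Record the new marker so the next run only picks up later changes.
            mapCache.put(key, DATE_FORMAT.format(new Date()));
        } finally {
            redisson.shutdown();
        }
    }
}
```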
+ * Push feeders * - * @param context the context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock; - RedissonClient redisson = null; + protected void pushFeeders(List ids) throws SupplyException { + ChallengeFeederParam param = new ChallengeFeederParam(); + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setChallengeIds(ids); try { - if (this.challengeDetailFeederManager == null) { - this.challengeDetailFeederManager = new ChallengeDetailFeederManager(JestClientUtils.get( - GLOBAL_CONFIGURATION.getJestClientConfiguration()), - DAOFactory.getInstance().createDAO(ChallengeDetailFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - this.challengeDetailFeederManager.setSubmissionImageUrl(this.config.getCommonConfiguration().getSubmissionImageUrl()); - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for challenges job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLastRunTimestampPrefix()); - - String timestamp = mapCache.get(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(1L); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for challenges job is:" + timestamp); - - String currentTime = DATE_FORMAT.format(new Date()); - - List totalIds = this.challengeDetailFeederManager.getChangedChallengeIds(new java.sql.Date(lastRunTimestamp.getTime())); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - logger.info("The count of the challenge ids to load:" + ids.size()); - logger.info("The challenge ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - ChallengeFeederParam param = new ChallengeFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesDetailIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesDetailType()); - param.setChallengeIds(sub); - try { - this.challengeDetailFeederManager.pushChallengeFeeder(param); - } catch (Exception e) { - // ignore all exception - e.printStackTrace(); - } - - from = to; - } - - logger.info("update last run timestamp for challenges job is:" + currentTime); - mapCache.put(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLastRunTimestampPrefix(), currentTime); - } finally { - logger.info("release the lock for challenges job"); - lock.unlock(); - } - } else { - logger.warn("the previous challenges job is still running"); - } - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + this.challengeDetailFeederManager.pushChallengeFeeder(param); + } catch (Exception e) { + // ignore all exception + e.printStackTrace(); } } + + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + List totalIds = this.challengeDetailFeederManager.getChangedChallengeIds(new java.sql.Date(lastRunTimestamp.getTime())); + return convertTCID(totalIds); + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesJob.java deleted file mode 100644 index b3939fd..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesJob.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (C) 2018 TopCoder Inc., All Rights Reserved. - */ -package com.appirio.service.challengefeeder.job; - -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.TimeZone; - -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; -import com.appirio.service.challengefeeder.dao.ChallengeFeederDAO; -import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; -import com.appirio.service.challengefeeder.manager.ChallengeFeederManager; -import com.appirio.service.challengefeeder.util.JestClientUtils; -import com.appirio.supply.DAOFactory; -import com.appirio.tech.core.api.v3.TCID; - -import de.spinscale.dropwizard.jobs.Job; -import de.spinscale.dropwizard.jobs.annotations.DelayStart; -import de.spinscale.dropwizard.jobs.annotations.Every; - -/** - * LoadChangedChallengesJob is used to load the changed challenges. 
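BaseJob.convertTCID() centralizes the Long.parseLong(totalIds.get(i).getId()) loops that every deleted doJob body carried. A usage sketch, assuming the List<TCID>/List<Long> generics and a String-accepting TCID constructor; the ids are arbitrary placeholders:

```java
import java.util.Arrays;
import java.util.List;

import com.appirio.service.challengefeeder.job.BaseJob;
import com.appirio.tech.core.api.v3.TCID;

/** Usage sketch for BaseJob.convertTCID; generics and the TCID(String) constructor are assumed. */
public class ConvertTcidDemo {

    public static void main(String[] args) {
        // DAO layers in this service hand back identifiers wrapped in TCID...
        List<TCID> daoIds = Arrays.asList(new TCID("12345"), new TCID("67890"));

        // ...while the feeder params want plain numeric ids.
        List<Long> ids = BaseJob.convertTCID(daoIds);
        System.out.println(ids); // [12345, 67890]
    }
}
```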
- * - * It's added in Topcoder - Create CronJob For Populating Changed Challenges To Elasticsearch v1.0 - * - * @author TCCoder - * @version 1.0 - * - */ -@DelayStart("15s") -@Every("${com.appirio.service.challengefeeder.job.LoadChangedChallengesJob}") -public class LoadChangedChallengesJob extends BaseJob { - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(LoadChangedChallengesJob.class); - - /** - * The challengeFeederManager field - */ - private ChallengeFeederManager challengeFeederManager; - - /** - * Create LoadChangedChallengesJob - * - * @param challengeFeederManager the challengeFeederManager to use - * @param config the config to use - */ - public LoadChangedChallengesJob(ChallengeFeederManager challengeFeederManager, ChallengeFeederServiceConfiguration config) { - this.challengeFeederManager = challengeFeederManager; - this.config = config; - } - - /** - * Create LoadChangedChallengesJob - * - */ - public LoadChangedChallengesJob() { - } - - /** - * Do job. This methods load the challenges to elastic services. - * - * @param context the context to use - * @throws JobExecutionException if any error occurs - */ - @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock; - RedissonClient redisson = null; - try { - if (this.challengeFeederManager == null) { - this.challengeFeederManager = new ChallengeFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), DAOFactory.getInstance().createDAO(ChallengeFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLoadChangedChallengesJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for challenges job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix()); - - String timestamp = mapCache.get(config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(1L); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for challenges job is:" + timestamp); - - String currentTime = DATE_FORMAT.format(new Date()); - - List totalIds = this.challengeFeederManager.getChangedChallengeIds(new java.sql.Date(lastRunTimestamp.getTime())); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - logger.info("The count of the challenge ids to load:" + ids.size()); - logger.info("The challenge ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - ChallengeFeederParam param = new ChallengeFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesType()); - param.setChallengeIds(sub); - try { - this.challengeFeederManager.pushChallengeFeeder(param); - } catch (Exception e) { - // ignore all exception - e.printStackTrace(); - } - - from = to; - } - - logger.info("update last run timestamp for challenges job is:" + currentTime); - mapCache.put(config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix(), currentTime); - } finally { - logger.info("release the lock for challenges job"); - lock.unlock(); - } - } else { - logger.warn("the previous challenges job is still running"); - } - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } - } - } -} \ No newline at end of file diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesListingJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesListingJob.java index 39b2256..ab2d16a 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesListingJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedChallengesListingJob.java @@ -3,165 +3,107 @@ */ package com.appirio.service.challengefeeder.job; -import java.util.ArrayList; import java.util.Date; import java.util.List; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; -import com.appirio.service.challengefeeder.dao.ChallengeFeederDAO; import com.appirio.service.challengefeeder.dao.ChallengeListingFeederDAO; import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; import com.appirio.service.challengefeeder.manager.ChallengeListingFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; import de.spinscale.dropwizard.jobs.annotations.Every; +import lombok.NoArgsConstructor; /** - * LoadChangedChallengeListingJob is used to load the changed challenges listing. + * LoadChangedChallengeListingJob is used to load the changed challenges + * listing. 
* * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index v1.0 * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * + * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ @DelayStart("14s") @Every("${com.appirio.service.challengefeeder.job.LoadChangedChallengesListingJob}") +@NoArgsConstructor public class LoadChangedChallengesListingJob extends BaseJob { - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(LoadChangedChallengesListingJob.class); - /** * The challengeListingFeederManager field */ private ChallengeListingFeederManager challengeListingFeederManager; - + /** - * Create LoadChangedChallengesListingJob + * Init job * - * @param challengeListingFeederManager the challengeListingFeederManager to use - * @param config the config to use + * @throws SupplyException if any error occurs */ - public LoadChangedChallengesListingJob(ChallengeListingFeederManager challengeListingFeederManager, ChallengeFeederServiceConfiguration config) { - this.challengeListingFeederManager = challengeListingFeederManager; - this.config = config; + @Override + protected void init() throws SupplyException { + super.init(); + if (this.challengeListingFeederManager == null) { + this.challengeListingFeederManager = new ChallengeListingFeederManager(JestClientUtils.get(this.config.getJestClientConfiguration()), + DAOFactory.getInstance().createDAO(ChallengeListingFeederDAO.class), this.config.getChallengeConfiguration()); + } + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getLoadChangedChallengesListingJob().getIndexName(); + this.typeName = "challenges"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getLoadChangedChallengesListingJob().getBatchUpdateSize(); + } } - + /** - * Create LoadChangedChallengesListingJob + * Get timestamp * + * @throws SupplyException if any error occurs + * @return the Date result */ - public LoadChangedChallengesListingJob() { + @Override + protected Date getTimestamp() throws SupplyException { + return this.challengeListingFeederManager.getTimestamp(); } - + /** - * Do job. This methods load the challenges to elastic services. 
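Every deleted doJob body in this diff also repeats the same Redisson boilerplate: build a Config (single server or cluster), create the client, tryLock a per-job lock so only one service instance runs the feeder at a time, and shut the client down in a finally block. BaseJob.getRedissonClient() now builds the client, and the locking discipline presumably moves into the shared doJob as well. A standalone sketch of that discipline, with placeholder address, lock name and watchdog timeout:

```java
import org.redisson.Redisson;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

/** Standalone sketch of the cross-instance locking used by the feeder jobs. */
public class JobLockDemo {

    public static void main(String[] args) {
        Config config = new Config();
        config.setLockWatchdogTimeout(60000L); // placeholder: keeps the lock alive while the job runs
        config.useSingleServer().setAddress("redis://127.0.0.1:6379"); // placeholder address

        RedissonClient redisson = Redisson.create(config);
        try {
            RLock lock = redisson.getLock("challengeslisting_job_locker"); // placeholder lock name
            if (lock.tryLock()) {
                try {
                    // load the changed ids and push them to Elasticsearch here
                    System.out.println("got the lock, running the feeder job");
                } finally {
                    lock.unlock();
                }
            } else {
                // another service instance is already running this job
                System.out.println("previous run still in progress, skipping");
            }
        } finally {
            redisson.shutdown();
        }
    }
}
```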
+ * Push feeders * - * @param context the context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock; - RedissonClient redisson = null; + protected void pushFeeders(List ids) throws SupplyException { + ChallengeFeederParam param = new ChallengeFeederParam(); + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setChallengeIds(ids); try { - if (this.challengeListingFeederManager == null) { - this.challengeListingFeederManager = new ChallengeListingFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), - DAOFactory.getInstance().createDAO(ChallengeFeederDAO.class), - DAOFactory.getInstance().createDAO(ChallengeListingFeederDAO.class), - GLOBAL_CONFIGURATION.getChallengeConfiguration()); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for the challenges listing job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for challenges listing job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLastRunTimestampPrefix()); - - String timestamp = mapCache.get(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(1L); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for challenges listing job is:" + timestamp); - - String currentTime = DATE_FORMAT.format(new Date()); - List totalIds = this.challengeListingFeederManager.getChangedChallengeIds(new java.sql.Date(lastRunTimestamp.getTime())); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - logger.info("The count of the challenge ids for listing to load:" + ids.size()); - logger.info("The challenge ids for listing to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - ChallengeFeederParam param = new ChallengeFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesListingIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesListingType()); - param.setChallengeIds(sub); - try { - this.challengeListingFeederManager.pushChallengeFeeder(param); - } catch (Exception e) { - // ignore all exception - e.printStackTrace(); - } - - from = to; - } - - logger.info("update last run timestamp for challenges listing job is:" + currentTime); - mapCache.put(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLastRunTimestampPrefix(), currentTime); - } finally { - logger.info("release the lock for challenges listing job"); - lock.unlock(); - } - } else { - logger.warn("the previous challenges listing job is still running"); - } - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + this.challengeListingFeederManager.pushChallengeFeeder(param); + } catch (Exception e) { + // ignore all exception + e.printStackTrace(); } } + + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + List ids = this.challengeListingFeederManager.getChangedChallengeIds(new java.sql.Date(lastRunTimestamp.getTime())); + return convertTCID(ids); + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedMMChallengeDetailJob.java b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedMMChallengeDetailJob.java index 2d06cfd..806b711 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedMMChallengeDetailJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/LoadChangedMMChallengeDetailJob.java @@ -1,27 +1,15 @@ package com.appirio.service.challengefeeder.job; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; import com.appirio.service.challengefeeder.dao.ChallengeDetailMMFeederDAO; import com.appirio.service.challengefeeder.dto.MmFeederParam; import com.appirio.service.challengefeeder.manager.ChallengeDetailMMFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; @@ -30,9 +18,13 @@ /** * LoadChangedMMChallengeDetailJob is used to populate legacy marathon matches to elasticsearch challenge detail. 
+ * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ @DelayStart("21s") @@ -46,120 +38,77 @@ public class LoadChangedMMChallengeDetailJob extends BaseJob { private ChallengeDetailMMFeederManager manager; /** - * Logger used to log events + * Init the job + * + * @throws SupplyException if any error occurs */ - private static final Logger logger = LoggerFactory.getLogger(LoadChangedMMChallengeDetailJob.class); + @Override + protected void init() throws SupplyException { + super.init(); + if (this.manager == null) { + this.manager = new ChallengeDetailMMFeederManager(JestClientUtils.get(this.config.getJestClientConfiguration()), + DAOFactory.getInstance().createDAO(ChallengeDetailMMFeederDAO.class)); + } + + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getIndexName(); + this.typeName = "challenges"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getBatchUpdateSize(); + } + } /** - * Create LoadChangedMMChallengeDetailJob + * Get timestamp * - * @param manager the manager to use - * @param config the config to use + * @throws SupplyException if any error occurs + * @return the Date result */ - public LoadChangedMMChallengeDetailJob(ChallengeDetailMMFeederManager manager, ChallengeFeederServiceConfiguration config) { - super(config); - this.manager = manager; + @Override + protected Date getTimestamp() throws SupplyException { + return this.manager.getTimestamp(); } /** - * Do job. Load the legacy marathon matches to elasticsearch service. 
+ * Push feeders * - * @param context the job context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock = null; - RedissonClient redisson = null; + protected void pushFeeders(List ids) throws SupplyException { + MmFeederParam param = new MmFeederParam(); + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setRoundIds(ids); + param.setLegacy(Boolean.TRUE); try { - if (this.manager == null) { - this.manager = new ChallengeDetailMMFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), - DAOFactory.getInstance().createDAO(MmFeederDAO.class), DAOFactory.getInstance().createDAO(ChallengeDetailMMFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for legacy marathon matches challenge detail job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for legacy marathon matches challenge detail job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLastRunTimestampPrefix()); - String timestamp = mapCache.get(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(INITIAL_TIMESTAMP); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for marathon matches challenge detail job is:" + lastRunTimestamp); - - Date currentTimestamp = new Date(); - Calendar calendar = Calendar.getInstance(); - calendar.setTime(currentTimestamp); - calendar.add(Calendar.DAY_OF_MONTH, this.config.getRedissonConfiguration().getMarathonMatchesDaysToSubtract()); - Date dateParam = calendar.getTime(); - - logger.info("The initial timestamp for legacy marathon matches challenge detail job is:" + dateParam); - - List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - - logger.info("The count of the MM ids to load:" + ids.size()); - logger.info("The MM ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - MmFeederParam param = new MmFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getChallengesDetailIndex()); - param.setType(this.config.getRedissonConfiguration().getChallengesDetailType()); - param.setRoundIds(sub); - param.setLegacy(Boolean.TRUE); - try { - this.manager.pushMarathonMatchDataIntoChallengeDetail(param); - } catch(Exception e) { - e.printStackTrace(); - } - from = to; - } - - // mark last execution as current timestamp - logger.info("update last run timestamp for challenges challenge detail job is:" + currentTimestamp); - mapCache.put(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLastRunTimestampPrefix(), DATE_FORMAT.format(currentTimestamp)); - } finally { - logger.info("release the lock for legacy marathon matches job"); - lock.unlock(); - } - } else { - logger.warn("the previous job for legacy marathon matches challenge detail job is still running"); - } - } catch(Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + this.manager.pushMarathonMatchDataIntoChallengeDetail(param); + } catch(Exception e) { + e.printStackTrace(); } } + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + Date currentTimestamp = new Date(); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(currentTimestamp); + calendar.add(Calendar.DAY_OF_MONTH, this.config.getJobsConfiguration().getLoadChangedMMChallengeDetailJob().getMarathonMatchesDaysToSubtract()); + Date dateParam = calendar.getTime(); + + List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); + + return convertTCID(totalIds); + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/MarathonMatchesJob.java b/src/main/java/com/appirio/service/challengefeeder/job/MarathonMatchesJob.java index 712cdd9..393f5ec 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/MarathonMatchesJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/MarathonMatchesJob.java @@ -3,28 +3,16 @@ */ package com.appirio.service.challengefeeder.job; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; -import java.util.concurrent.TimeUnit; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; import com.appirio.service.challengefeeder.dao.MarathonMatchFeederDAO; import com.appirio.service.challengefeeder.dto.DataScienceFeederParam; import com.appirio.service.challengefeeder.manager.MarathonMatchFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; @@ -36,132 +24,91 @@ * * It was added in Topcoder - Create CronJob For Populating Marathon Matches and SRMs To 
Elasticsearch v1.0 * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * + * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ @DelayStart("20s") @Every("${com.appirio.service.challengefeeder.job.MarathonMatchesJob}") @NoArgsConstructor public class MarathonMatchesJob extends BaseJob { - /** * The marathon match feeder manager instance. */ private MarathonMatchFeederManager manager; /** - * Logger used to log events + * Init the job + * + * @throws SupplyException if any error occurs */ - private static final Logger logger = LoggerFactory.getLogger(MarathonMatchesJob.class); + @Override + protected void init() throws SupplyException { + super.init(); + if (this.manager == null) { + this.manager = new MarathonMatchFeederManager(JestClientUtils.get(this.config.getJestClientConfiguration()), + DAOFactory.getInstance().createDAO(MarathonMatchFeederDAO.class)); + } + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getMarathonMatchesJob().getIndexName(); + this.typeName = "mmatches"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getMarathonMatchesJob().getBatchUpdateSize(); + } + } /** - * MarathonMatchesJob constructor. + * Get timestamp * - * @param manager the MarathonMatchFeederManager to use - * @param config the config to use + * @throws SupplyException if any error occurs + * @return the Date result */ - public MarathonMatchesJob(MarathonMatchFeederManager manager, ChallengeFeederServiceConfiguration config) { - super(config); - this.manager = manager; + @Override + protected Date getTimestamp() throws SupplyException { + return this.manager.getTimestamp(); } /** - * Do job. Load the marathon matches to elasticsearch service. 
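The marathon-match and SRM jobs keep their date window inside the refactored getFeederIdsToPush(): they add the configured days-to-subtract value (presumably negative) to the current date and then query only matches whose registration phase started after that boundary and that changed since the last run. A worked sketch of the calendar arithmetic, with a placeholder value:

```java
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

/** Worked example of the date-window arithmetic in getFeederIdsToPush. */
public class DateWindowDemo {

    public static void main(String[] args) {
        // Placeholder for getMarathonMatchesDaysToSubtract(); expected to be negative.
        int daysToSubtract = -4;

        Date currentTimestamp = new Date();
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(currentTimestamp);
        // Adding a negative day count moves the boundary into the past.
        calendar.add(Calendar.DAY_OF_MONTH, daysToSubtract);
        Date dateParam = calendar.getTime();

        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        System.out.println("now:          " + fmt.format(currentTimestamp));
        System.out.println("window start: " + fmt.format(dateParam));
        // Only matches whose registration phase started after dateParam (and which
        // changed since the last run) are pushed to the index.
    }
}
```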
+ * Push feeders * - * @param context the job context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock = null; - RedissonClient redisson = null; + protected void pushFeeders(List ids) throws SupplyException { + DataScienceFeederParam param = new DataScienceFeederParam(); + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setRoundIds(ids); try { - if (this.manager == null) { - this.manager = new MarathonMatchFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), DAOFactory.getInstance().createDAO(MarathonMatchFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for marathon matches job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getMarathonMatchesJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for marathon matches job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix()); - String timestamp = mapCache.get(config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(INITIAL_TIMESTAMP); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for marathon matches job is:" + lastRunTimestamp); - - Date currentTimestamp = new Date(); - Calendar calendar = Calendar.getInstance(); - calendar.setTime(currentTimestamp); - calendar.add(Calendar.DAY_OF_MONTH, this.config.getRedissonConfiguration().getMarathonMatchesDaysToSubtract()); - Date dateParam = calendar.getTime(); - - logger.info("The initial timestamp for marathon matches job is:" + dateParam); - - List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); - - List ids = new ArrayList<>(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - - logger.info("The count of the MM ids to load:" + ids.size()); - logger.info("The MM ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? 
(ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - DataScienceFeederParam param = new DataScienceFeederParam(); - param.setIndex(this.config.getRedissonConfiguration().getMmIndex()); - param.setType(this.config.getRedissonConfiguration().getMmType()); - param.setRoundIds(sub); - try { - this.manager.pushMarathonMatchFeeder(param); - } catch(Exception e) { - e.printStackTrace(); - } - from = to; - } - - // mark last execution as current timestamp - logger.info("update last run timestamp for challenges job is:" + currentTimestamp); - mapCache.put(config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix(), DATE_FORMAT.format(currentTimestamp)); - } finally { - logger.info("release the lock for marathon matches job"); - lock.unlock(); - } - } else { - logger.warn("the previous job for marathon matches job is still running"); - } - } catch(Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + this.manager.pushMarathonMatchFeeder(param); + } catch(Exception e) { + e.printStackTrace(); } } + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + Date currentTimestamp = new Date(); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(currentTimestamp); + calendar.add(Calendar.DAY_OF_MONTH, this.config.getJobsConfiguration().getMarathonMatchesJob().getMarathonMatchesDaysToSubtract()); + Date dateParam = calendar.getTime(); + List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); + + return convertTCID(totalIds); + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/SingleRoundMatchesJob.java b/src/main/java/com/appirio/service/challengefeeder/job/SingleRoundMatchesJob.java index 00b3da3..d6ab6f8 100644 --- a/src/main/java/com/appirio/service/challengefeeder/job/SingleRoundMatchesJob.java +++ b/src/main/java/com/appirio/service/challengefeeder/job/SingleRoundMatchesJob.java @@ -3,27 +3,16 @@ */ package com.appirio.service.challengefeeder.job; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RLock; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.appirio.service.challengefeeder.dao.SRMFeederDAO; import com.appirio.service.challengefeeder.dto.DataScienceFeederParam; import com.appirio.service.challengefeeder.manager.SRMFeederManager; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.DAOFactory; +import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.TCID; import de.spinscale.dropwizard.jobs.annotations.DelayStart; @@ -35,8 +24,12 @@ * * It was added in Topcoder - Create CronJob For Populating Marathon Matches and SRMs To Elasticsearch v1.0 * + * Version 2.0 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - refactor it to use the new configuration + * + * * @author TCCoder - * @version 1.0 + * @version 2.0 * */ @DelayStart("25s") 
@@ -50,116 +43,74 @@ public class SingleRoundMatchesJob extends BaseJob { private SRMFeederManager manager; /** - * Logger used to log events + * Init the job + * + * @throws SupplyException if any error occurs */ - private static final Logger logger = LoggerFactory.getLogger(SingleRoundMatchesJob.class); + @Override + protected void init() throws SupplyException { + super.init(); + if (this.manager == null) { + this.manager = new SRMFeederManager(JestClientUtils.get(this.config.getJestClientConfiguration()), DAOFactory.getInstance().createDAO(SRMFeederDAO.class)); + } + if (this.indexName == null) { + this.indexName = this.config.getJobsConfiguration().getSingleRoundMatchesJob().getIndexName(); + this.typeName = "srms"; + this.lastRuntimestampeKey = this.indexName + BaseJob.JOB_LAST_RUN_TIMESTAMP_SUFFIX; + this.lockerKey = this.indexName + BaseJob.JOB_LOCKER_NAME_SUFFIX; + this.jobEnableKey = this.indexName + BaseJob.JOB_ENABLE_SUFFIX; + this.batchSize = this.config.getJobsConfiguration().getSingleRoundMatchesJob().getBatchUpdateSize(); + } + } /** - * Create a SingleRoundMatchesJob. - * @param manager the srm feeder manager + * Get timestamp + * + * @throws SupplyException if any error occurs + * @return the Date result */ - public SingleRoundMatchesJob(SRMFeederManager manager) { - this.manager = manager; + @Override + protected Date getTimestamp() throws SupplyException { + return this.manager.getTimestamp(); } /** - * Do job. Load the single round matches to elasticsearch service. + * Push feeders * - * @param context the job context to use - * @throws JobExecutionException if any error occurs + * @param ids the ids to use + * @throws SupplyException if any error occurs */ @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - RLock lock; - RedissonClient redisson = null; - try { - if (this.manager == null) { - this.manager = new SRMFeederManager(JestClientUtils.get(GLOBAL_CONFIGURATION.getJestClientConfiguration()), DAOFactory.getInstance().createDAO(SRMFeederDAO.class)); - } - if (this.config == null) { - this.config = GLOBAL_CONFIGURATION; - } - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(this.config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (this.config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : this.config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - } else { - redissonConfig.useSingleServer().setAddress(this.config.getRedissonConfiguration().getSingleServerAddress()); - } - - logger.info("Try to get the lock for single algorithm matches job"); - redisson = Redisson.create(redissonConfig); - lock = redisson.getLock(config.getRedissonConfiguration().getSingleRoundMatchesJobLockerKeyName()); - if (lock.tryLock()) { - logger.info("Get the lock for single algorithm matches job successfully"); - try { - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix()); - String timestamp = mapCache.get(config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix()); - - Date lastRunTimestamp = new Date(INITIAL_TIMESTAMP); - if (timestamp != null) { - lastRunTimestamp = DATE_FORMAT.parse(timestamp); - } - - logger.info("The last run timestamp for single algorithm matches job is:" + lastRunTimestamp.getTime()); - - Date currentTimestamp = new Date(); - Calendar calendar = Calendar.getInstance(); - 
calendar.setTime(currentTimestamp); - calendar.add(Calendar.DAY_OF_MONTH, this.config.getRedissonConfiguration().getSingleRoundMatchesDaysToSubtract()); - Date dateParam = calendar.getTime(); + protected void pushFeeders(List ids) throws SupplyException { + DataScienceFeederParam param = new DataScienceFeederParam(); - logger.info("The initial timestamp for single algorithm matches job is:" + dateParam); - - List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); - - List ids = new ArrayList(); - for (int i = 0; i < totalIds.size(); ++i) { - ids.add(Long.parseLong(totalIds.get(i).getId())); - } - logger.info("The count of the SRM ids to load:" + ids.size()); - logger.info("The SRM ids to load:" + ids); - - int batchSize = this.config.getRedissonConfiguration().getBatchUpdateSize(); - int to = 0; - int from = 0; - while (to < ids.size()) { - to += (to + batchSize) > ids.size() ? (ids.size() - to) : batchSize; - List sub = ids.subList(from, to); - DataScienceFeederParam param = new DataScienceFeederParam(); - - param.setIndex(this.config.getRedissonConfiguration().getSrmsIndex()); - param.setType(this.config.getRedissonConfiguration().getSrmsType()); - param.setRoundIds(sub); - try { - this.manager.pushSRMFeeder(param); - } catch(Exception e) { - e.printStackTrace(); - } - - from = to; - } - - // mark last execution as current timestamp - logger.info("update last run timestamp for challenges job is:" + currentTimestamp); - mapCache.put(config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix(), DATE_FORMAT.format(currentTimestamp)); - } finally { - logger.info("release the lock for single algorithm matches job"); - lock.unlock(); - } - } else { - logger.warn("the previous job for single algorithm matches job is still running"); - } - - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - if (redisson != null) { - redisson.shutdown(); - } + param.setIndex(this.indexName); + param.setType(this.typeName); + param.setRoundIds(ids); + try { + this.manager.pushSRMFeeder(param); + } catch(Exception e) { + e.printStackTrace(); } } + /** + * Get feeder ids to push + * + * @param lastRunTimestamp the lastRunTimestamp to use + * @throws SupplyException if any error occurs + * @return the List result + */ + @Override + protected List getFeederIdsToPush(Date lastRunTimestamp) throws SupplyException { + Date currentTimestamp = new Date(); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(currentTimestamp); + calendar.add(Calendar.DAY_OF_MONTH, this.config.getJobsConfiguration().getSingleRoundMatchesJob().getSingleRoundMatchesDaysToSubtract()); + Date dateParam = calendar.getTime(); + List totalIds = this.manager.getMatchesWithRegistrationPhaseStartedIds(new java.sql.Date(dateParam.getTime()), lastRunTimestamp.getTime()); + + return convertTCID(totalIds); + } + } diff --git a/src/main/java/com/appirio/service/challengefeeder/job/StartupJob.java b/src/main/java/com/appirio/service/challengefeeder/job/StartupJob.java deleted file mode 100644 index a1e0422..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/job/StartupJob.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (C) 2018 TopCoder Inc., All Rights Reserved. 
- */ -package com.appirio.service.challengefeeder.job; - -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.redisson.Redisson; -import org.redisson.api.RMapCache; -import org.redisson.api.RedissonClient; -import org.redisson.config.Config; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.appirio.service.challengefeeder.ChallengeFeederServiceConfiguration; - -import de.spinscale.dropwizard.jobs.annotations.DelayStart; -import de.spinscale.dropwizard.jobs.annotations.OnApplicationStart; - -/** - * StartupJob is used to remove the redis cache values for jobs - * LoadChallengeChallengesJob, MarathonMatchesJob and SingleRoundMatchesJob. - * - * It's added in Topcoder - Create CronJob For Populating Changed Challenges To - * Elasticsearch v1.0 - * - *

- * Changes in v1.1 (Topcoder - Create CronJob For Populating Marathon Matches - * and SRMs To Elasticsearch v1.0): - * - Added reference to job MarathonMatchesJob. - * - Added reference to job SingleRoundMatchesJob. - * - Renamed class from StartupJobForLoadChallengeChallenges to StartupJob - * - * Version 1.2 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index v1.0 - * - remove the last run time for LoadChangedChallengesListingJob - * - * Version 1.3 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index For Legacy Marathon Matches v1.0 - * - remove the last run time for the legacy mm. - * - remove the last run time for the legacy mm into challenge listing. - * - * Version 1.4 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Detail Index For Legacy Marathon Matches v1.0 - * - remove the last run time for the legacy mm. - * - remove the last run time for the legacy mm into challenge details. - * - * Change in 1.5 (Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Detail Index) - * - Added reference to job LoadChallengeChallengesDetailJob - *
    - * @author TCCoder - * @version 1.5 - * - */ -@DelayStart("10s") -@OnApplicationStart -public class StartupJob extends BaseJob { - - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(StartupJob.class); - - /** - * Do job - * - * @param context the context to use - * @throws JobExecutionException if any error occurs - */ - @Override - public void doJob(JobExecutionContext context) throws JobExecutionException { - if (GLOBAL_CONFIGURATION.getRedissonConfiguration().isForceInitialLoad()) { - ChallengeFeederServiceConfiguration config = GLOBAL_CONFIGURATION; - Config redissonConfig = new Config(); - redissonConfig.setLockWatchdogTimeout(config.getRedissonConfiguration().getLockWatchdogTimeout()); - if (config.getRedissonConfiguration().isClusterEnabled()) { - for (String addr : config.getRedissonConfiguration().getNodeAddresses()) { - redissonConfig.useClusterServers().addNodeAddress(addr); - } - - } else { - redissonConfig.useSingleServer().setAddress(config.getRedissonConfiguration().getSingleServerAddress()); - } - - RedissonClient redisson = Redisson.create(redissonConfig); - RMapCache mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLastRunTimestampPrefix()); - String time = mapCache.remove(config.getRedissonConfiguration().getLoadChangedChallengesDetailJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for challenge detail load job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getLoadChangedChallengesJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for challenge load job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getLoadChangedChallengesListingJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for challenge listing load job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getMarathonMatchesJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for mm job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getSingleRoundMatchesJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for srm job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLegacyMMJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getLegacyMMJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for legacy mm job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLastRunTimestampPrefix()); - time = mapCache.remove(config.getRedissonConfiguration().getLegacyMMToChallengeListingJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for legacy mm to challenge listing job:" + time); - - mapCache = redisson.getMapCache(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLastRunTimestampPrefix()); - time = 
mapCache.remove(config.getRedissonConfiguration().getLoadChangedMMChallengeDetailJobLastRunTimestampPrefix()); - logger.info("Remove the last run time for legacy mm to challenge details job:" + time); - - redisson.shutdown(); - } - - - } -} \ No newline at end of file diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailFeederManager.java index 6d95a9d..9c725a8 100644 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailFeederManager.java +++ b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailFeederManager.java @@ -33,9 +33,13 @@ /** * ChallengeDetailFeederManager is used to handle the challenge detail feeder. + * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - remove the useless dao + * * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ public class ChallengeDetailFeederManager { diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailMMFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailMMFeederManager.java index 1f8b3ac..375088a 100644 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailMMFeederManager.java +++ b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeDetailMMFeederManager.java @@ -8,7 +8,6 @@ import com.appirio.service.challengefeeder.api.challengedetail.RegistrantData; import com.appirio.service.challengefeeder.api.challengedetail.SubmissionData; import com.appirio.service.challengefeeder.dao.ChallengeDetailMMFeederDAO; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; import com.appirio.service.challengefeeder.dto.MmFeederParam; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.SupplyException; @@ -42,12 +41,6 @@ public class ChallengeDetailMMFeederManager { * Logger used to log events */ private static final Logger logger = LoggerFactory.getLogger(ChallengeDetailMMFeederManager.class); - - - /** - * DAO to access marathon match data from the transactional database. - */ - private final MmFeederDAO mmFeederDAO; /** * DAO to access marathon match data from the transactional database. @@ -63,12 +56,10 @@ public class ChallengeDetailMMFeederManager { * Create ChallengeDetailMMFeederManager * * @param jestClient the jestClient to use - * @param mmFeederDAO the mmFeederDAO to use * @param challengeDetailMMFeederDAO the challengeDetailMMFeederDAO to use */ - public ChallengeDetailMMFeederManager(JestClient jestClient, MmFeederDAO mmFeederDAO, ChallengeDetailMMFeederDAO challengeDetailMMFeederDAO) { + public ChallengeDetailMMFeederManager(JestClient jestClient, ChallengeDetailMMFeederDAO challengeDetailMMFeederDAO) { this.jestClient = jestClient; - this.mmFeederDAO = mmFeederDAO; this.challengeDetailMMFeederDAO = challengeDetailMMFeederDAO; } @@ -200,7 +191,7 @@ private static String getColorStyle(Integer rating) { * @return the timestamp result */ public Date getTimestamp() throws SupplyException { - return this.mmFeederDAO.getTimestamp().getDate(); + return this.challengeDetailMMFeederDAO.getTimestamp().getDate(); } /** @@ -211,6 +202,6 @@ public Date getTimestamp() throws SupplyException { * @return The list of TCID.
*/ public List getMatchesWithRegistrationPhaseStartedIds(java.sql.Date date, long lastRunTimestamp) { - return this.mmFeederDAO.getMatchesWithRegistrationPhaseStartedIds(date, lastRunTimestamp); + return this.challengeDetailMMFeederDAO.getMatchesWithRegistrationPhaseStartedIds(date, lastRunTimestamp); } } diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederManager.java deleted file mode 100644 index 91c00a5..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederManager.java +++ /dev/null @@ -1,552 +0,0 @@ -/* - * Copyright (C) 2017 TopCoder Inc., All Rights Reserved. - */ -package com.appirio.service.challengefeeder.manager; - -import com.appirio.service.challengefeeder.Helper; -import com.appirio.service.challengefeeder.api.ChallengeData; -import com.appirio.service.challengefeeder.api.CheckpointPrizeData; -import com.appirio.service.challengefeeder.api.EventData; -import com.appirio.service.challengefeeder.api.FileTypeData; -import com.appirio.service.challengefeeder.api.PhaseData; -import com.appirio.service.challengefeeder.api.PrizeData; -import com.appirio.service.challengefeeder.api.PropertyData; -import com.appirio.service.challengefeeder.api.ResourceData; -import com.appirio.service.challengefeeder.api.ReviewData; -import com.appirio.service.challengefeeder.api.SubmissionData; -import com.appirio.service.challengefeeder.api.TermsOfUseData; -import com.appirio.service.challengefeeder.api.WinnerData; -import com.appirio.service.challengefeeder.dao.ChallengeFeederDAO; -import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; -import com.appirio.service.challengefeeder.util.JestClientUtils; -import com.appirio.supply.SupplyException; -import com.appirio.supply.constants.*; -import com.appirio.tech.core.api.v3.TCID; -import com.appirio.tech.core.api.v3.request.FieldSelector; -import com.appirio.tech.core.api.v3.request.FilterParameter; -import com.appirio.tech.core.api.v3.request.QueryParameter; -import com.appirio.tech.core.auth.AuthUser; - -import io.searchbox.client.JestClient; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.*; - -import javax.servlet.http.HttpServletResponse; - -import java.util.stream.*; - -/** - * ChallengeFeederManager is used to handle the challenge feeder. - * - * Version 1.1 - Topcoder - Populate Marathon Match Related Data Into Challenge Model In Elasticsearch v1.0 - * - challenged to call util classes for common shared methods such as assoicate methods - * - * Version 1.2 - Topcoder - Create CronJob For Populating Changed Challenges To Elasticsearch v1.0 - * - add getTimestamp method to get the current timestamp from the database - * - add pushChallengeFeeder method to call without admin permission check - * - * Version 1.3 - Topcoder - Elasticsearch Service - Populate Challenge Points Prize In Challenges Index - * - add the methods to associate the prize points for the challenge ids - * - add the DAO calls to get prize points for the challenge ids. 
- * - * @author TCSCODER - * @version 1.3 - */ -public class ChallengeFeederManager { - - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(ChallengeFeederManager.class); - - /** - * MM Round id project_info_type_id - */ - private static final long MM_PROPERTY_ID = 56; - - /** - * DAO to access challenge data from the transactional database. - */ - private final ChallengeFeederDAO challengeFeederDAO; - - - /** - * The jestClient field - */ - private final JestClient jestClient; - - /** - * Create ChallengeFeederManager - * - * @param jestClient the jestClient to use - * @param challengeFeederDAO the challengeFeederDAO to use - */ - public ChallengeFeederManager(JestClient jestClient, ChallengeFeederDAO challengeFeederDAO) { - this.jestClient = jestClient; - this.challengeFeederDAO = challengeFeederDAO; - } - - /** - * Push challenge feeder - * - * @param authUser the authUser to use - * @param param the challenge feeders param to use - * @throws SupplyException if any error occurs - */ - public void pushChallengeFeeder(AuthUser authUser, ChallengeFeederParam param) throws SupplyException { - logger.info("Enter of pushChallengeFeeder"); - this.pushChallengeFeeder(param); - } - - /** - * Push challenge feeder - * - * @param param the challenge feeders param to use - * @throws SupplyException if any error occurs - */ - public void pushChallengeFeeder(ChallengeFeederParam param) throws SupplyException { - if (param.getType() == null || param.getType().trim().length() == 0) { - param.setType("challenges"); - } - if (param.getIndex() == null || param.getIndex().trim().length() == 0) { - throw new SupplyException("The index should be non-null and non-empty string.", HttpServletResponse.SC_BAD_REQUEST); - } - if (param.getChallengeIds() == null || param.getChallengeIds().size() == 0) { - throw new SupplyException("Challenge ids must be provided", HttpServletResponse.SC_BAD_REQUEST); - } - if (param.getChallengeIds().contains(null)) { - throw new SupplyException("Null challenge id is not allowed", HttpServletResponse.SC_BAD_REQUEST); - } - - FilterParameter filter = new FilterParameter("challengeIds=in(" + ChallengeFeederUtil.listAsString(param.getChallengeIds()) + ")"); - QueryParameter queryParameter = new QueryParameter(new FieldSelector()); - queryParameter.setFilter(filter); - List challenges = this.challengeFeederDAO.getChallenges(queryParameter); - - List idsNotFound = new ArrayList<>(); - for (Long id : param.getChallengeIds()) { - boolean hit = false; - for (ChallengeData data : challenges) { - if (id.longValue() == data.getId().longValue()) { - hit = true; - break; - } - } - if (!hit) { - idsNotFound.add(id); - } - } - if (!idsNotFound.isEmpty()) { - logger.warn("These challenge ids can not be found:" + idsNotFound); - } - - logger.info("aggregating challenge data for " + param.getChallengeIds()); - - // associate all the data - List phases = this.challengeFeederDAO.getPhases(queryParameter); - ChallengeFeederUtil.associateAllPhases(challenges, phases); -// exclude this for now -// List resources = this.challengeFeederDAO.getResources(queryParameter); -// ChallengeFeederUtil.associateAllResources(challenges, resources); - - List prizes = this.challengeFeederDAO.getPrizes(queryParameter); - ChallengeFeederUtil.associateAllPrizes(challenges, prizes); - - List pointPrizes = this.challengeFeederDAO.getPointsPrize(queryParameter); - ChallengeFeederUtil.associateAllPointsPrize(challenges, pointPrizes); - - List checkpointPrizes = 
this.challengeFeederDAO.getCheckpointPrizes(queryParameter); - ChallengeFeederUtil.associateAllCheckpointPrizes(challenges, checkpointPrizes); - - List properties = this.challengeFeederDAO.getProperties(queryParameter); - ChallengeFeederUtil.associateAllProperties(challenges, properties); - - List reviews = this.challengeFeederDAO.getReviews(queryParameter); - ChallengeFeederUtil.associateAllReviews(challenges, reviews); -// exclude this for now -// List submissions = this.challengeFeederDAO.getSubmissions(queryParameter); -// ChallengeFeederUtil.associateAllSubmissions(challenges, submissions); - - List winners = this.challengeFeederDAO.getWinners(queryParameter); - ChallengeFeederUtil.associateAllWinners(challenges, winners); - - List fileTypes = this.challengeFeederDAO.getFileTypes(queryParameter); - ChallengeFeederUtil.associateAllFileTypes(challenges, fileTypes); - - List termsOfUse = this.challengeFeederDAO.getTerms(queryParameter); - ChallengeFeederUtil.associateAllTermsOfUse(challenges, termsOfUse); - - List events = this.challengeFeederDAO.getEvents(queryParameter); - ChallengeFeederUtil.associateAllEvents(challenges, events); - - List> groupIds = this.challengeFeederDAO.getGroupIds(queryParameter); - - Map mmRoundToIdMaps = new HashMap<>(); - for (ChallengeData data : challenges) { - if (SubTrack.MARATHON_MATCH.name().equals(data.getSubTrack()) || - SubTrack.DEVELOP_MARATHON_MATCH.name().equals(data.getSubTrack())) { - if (data.getIsBanner() == null) data.setIsBanner(Boolean.FALSE); - data.setIsLegacy(Boolean.FALSE); - for (PropertyData prop : data.getProperties()) { - if (prop.getPropertyId() == MM_PROPERTY_ID && prop.getValue() != null) { - mmRoundToIdMaps.put(Long.valueOf(prop.getValue()), data.getId()); - break; - } - } - } - for (Map item : groupIds) { - if (item.get("challengeId").toString().equals(data.getId().toString())) { - if (data.getGroupIds() == null) { - data.setGroupIds(new ArrayList<>()); - } - if (item.get("groupId") != null) { - data.getGroupIds().add(Long.parseLong(item.get("groupId").toString())); - } - } - } - } - - List> userIds = this.challengeFeederDAO.getUserIds(queryParameter); - for (ChallengeData data : challenges) { - for (Map item : userIds) { - if (item.get("challengeId").toString().equals(data.getId().toString())) { - if (data.getUserIds() == null) { - data.setUserIds(new ArrayList<>()); - } - if (data.getHasUserSubmittedForReview() == null) { - data.setHasUserSubmittedForReview(new ArrayList<>()); - } - data.getUserIds().add(Long.parseLong(item.get("userId").toString())); - data.getHasUserSubmittedForReview().add(item.get("hasUserSubmittedForReview").toString()); - } - } - } - - //find mm contest and component id, submitter - if (mmRoundToIdMaps.size() > 0) { - FilterParameter roundIdFilter = new FilterParameter("roundIds=in(" + - String.join(", ", mmRoundToIdMaps.keySet().stream().map(r -> r.toString()).collect(Collectors.toList())) + ")"); - queryParameter.setFilter(roundIdFilter); - List> mmContestIds = this.challengeFeederDAO.getMMContestComponent(queryParameter); - - List mmResources = this.challengeFeederDAO.getMMResources(queryParameter); - mmContestIds.forEach(cm -> { - ChallengeData challenge = challenges.stream().filter(c -> c.getId().equals(mmRoundToIdMaps.get(Long.valueOf(cm.get("roundId").toString())))) - .findFirst().orElse(null); - challenge.setContestId(Long.valueOf(cm.get("contestId").toString())); - challenge.setComponentId(Long.valueOf(cm.get("componentId").toString())); - //remove submitter if there's we'll populate from 
legacy mm data below -// exclude this for now -// challenge.setResources(challenge.getResources().stream() -// .filter(r -> !"Submitter".equals(r.getRole())).collect(Collectors.toList())); - challenge.setNumSubmissions(0L); - challenge.setNumRegistrants(0L); - }); - //collect submitter from legacy mm - - mmResources.forEach(r -> { - ChallengeData challenge = challenges.stream().filter(c -> c.getId().equals(mmRoundToIdMaps.get(r.getChallengeId()))) - .findFirst().orElse(null); - if (challenge.getUserIds() == null) challenge.setUserIds(new ArrayList<>()); - challenge.getUserIds().add(r.getUserId()); - if (challenge.getHasUserSubmittedForReview() == null) challenge.setHasUserSubmittedForReview(new ArrayList<>()); - String hasSubmitted = (r.getSubmissionCount() > 0) ? r.getUserId().toString() + 'T' : r.getUserId().toString() + 'F'; - challenge.getHasUserSubmittedForReview().add(hasSubmitted); - challenge.setNumRegistrants(challenge.getNumRegistrants() + 1); - challenge.setNumSubmissions(challenge.getNumSubmissions() + r.getSubmissionCount()); - r.setChallengeId(null); -// challenge.getResources().add(r); - }); - } - - logger.info("pushing challenge data to elasticsearch for " + param.getChallengeIds()); - - try { - JestClientUtils.pushFeeders(jestClient, param, challenges); - } catch (IOException ioe) { - SupplyException se = new SupplyException("Internal server error occurs", ioe); - se.setStatusCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - throw se; - } - } - - /** - * Get timestamp from the persistence - * - * @throws SupplyException if any error occurs - * @return the Date result - */ - public Date getTimestamp() throws SupplyException { - return this.challengeFeederDAO.getTimestamp().getDate(); - } - - /** - * Get changed challenge ids - * - * @param lastRunTimestamp the lastRunTimestamp to use - * @return the List result - */ - public List getChangedChallengeIds(Date lastRunTimestamp) { - if (lastRunTimestamp == null) { - throw new IllegalArgumentException("The lastRunTimestamp should be non-null."); - } - return this.challengeFeederDAO.getChangedChallengeIds(lastRunTimestamp); - } - - /** - * Associate all terms of use - * - * @param challenges the challenges to use - * @param termsOfUse the termsOfUse to use - */ - private void associateAllTermsOfUse(List challenges, List termsOfUse) { - for (TermsOfUseData item : termsOfUse) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getTerms() == null) { - challenge.setTerms(new ArrayList()); - } - challenge.getTerms().add(item); - break; - } - } - } - for (TermsOfUseData item : termsOfUse) { - item.setChallengeId(null); - } - } - - /** - * Associate all fileTypes - * - * @param challenges the challenges to use - * @param fileTypes the fileTypes to use - */ - private void associateAllFileTypes(List challenges, List fileTypes) { - for (FileTypeData item : fileTypes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getFileTypes() == null) { - challenge.setFileTypes(new ArrayList()); - } - challenge.getFileTypes().add(item); - break; - } - } - } - for (FileTypeData item : fileTypes) { - item.setChallengeId(null); - } - } - - /** - * Associate all winners - * - * @param challenges the challenges to use - * @param winners the winners to use - */ - private void associateAllWinners(List challenges, List winners) { - for (WinnerData item : winners) { - for (ChallengeData challenge : challenges) { - if 
(challenge.getId().equals(item.getChallengeId())) { - if (challenge.getWinners() == null) { - challenge.setWinners(new ArrayList()); - } - challenge.getWinners().add(item); - break; - } - } - } - for (WinnerData item : winners) { - item.setChallengeId(null); - } - } - - /** - * Associate all submissions - * - * @param challenges the challenges to use - * @param submissions the submissions to use - */ - private void associateAllSubmissions(List challenges, List submissions) { - for (SubmissionData item : submissions) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getSubmissions() == null) { - challenge.setSubmissions(new ArrayList()); - } - challenge.getSubmissions().add(item); - break; - } - } - } - for (SubmissionData item : submissions) { - item.setChallengeId(null); - } - } - - /** - * Associate all reviews - * - * @param challenges the challenges to use - * @param reviews the reviews to use - */ - private void associateAllReviews(List challenges, List reviews) { - for (ReviewData item : reviews) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getReviews() == null) { - challenge.setReviews(new ArrayList()); - } - challenge.getReviews().add(item); - break; - } - } - } - for (ReviewData item : reviews) { - item.setChallengeId(null); - } - } - - /** - * Associate all properties - * - * @param challenges the challenges to use - * @param properties the properties to use - */ - private void associateAllProperties(List challenges, List properties) { - for (PropertyData item : properties) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getProperties() == null) { - challenge.setProperties(new ArrayList()); - } - challenge.getProperties().add(item); - break; - } - } - } - for (PropertyData item : properties) { - item.setChallengeId(null); - } - } - - /** - * Associate all checkpointPrizes - * - * @param challenges the challenges to use - * @param checkpointPrizes the checkpointPrizes to use - */ - private void associateAllCheckpointPrizes(List challenges, List checkpointPrizes) { - for (CheckpointPrizeData item : checkpointPrizes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getCheckpointPrizes() == null) { - challenge.setCheckpointPrizes(new ArrayList()); - } - challenge.getCheckpointPrizes().add(item); - break; - } - } - } - for (CheckpointPrizeData item : checkpointPrizes) { - item.setChallengeId(null); - } - } - - /** - * Associate all prizes - * - * @param challenges the challenges to use - * @param prizes the prizes to use - */ - private void associateAllPrizes(List challenges, List prizes) { - for (PrizeData item : prizes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getPrizes() == null) { - challenge.setPrizes(new ArrayList()); - } - challenge.getPrizes().add(item); - break; - } - } - } - for (PrizeData item : prizes) { - item.setChallengeId(null); - } - } - - /** - * Associate all events - * - * @param challenges the challenges to use - * @param events the events to use - */ - private void associateAllEvents(List challenges, List events) { - for (EventData item : events) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getEvents() == null) { - 
challenge.setEvents(new ArrayList()); - } - challenge.getEvents().add(item); - break; - } - } - } - for (EventData item : events) { - item.setChallengeId(null); - } - } - - /** - * Associate all phases - * - * @param challenges the challenges to use - * @param allPhases the allPhases to use - */ - private void associateAllPhases(List challenges, List allPhases) { - for (PhaseData aPhase : allPhases) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(aPhase.getChallengeId())) { - if (challenge.getPhases() == null) { - challenge.setPhases(new ArrayList()); - } - challenge.getPhases().add(aPhase); - break; - } - } - } - for (PhaseData aPhase : allPhases) { - aPhase.setChallengeId(null); - } - } - - /** - * Associate all resources - * - * @param challenges the challenges to use - * @param resources the resources to use - */ - private void associateAllResources(List challenges, List resources) { - for (ResourceData item : resources) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getResources() == null) { - challenge.setResources(new ArrayList()); - } - challenge.getResources().add(item); - break; - } - } - } - for (ResourceData item : resources) { - item.setChallengeId(null); - } - } -} diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederUtil.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederUtil.java index 588ddf5..4215e89 100644 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederUtil.java +++ b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeFeederUtil.java @@ -3,22 +3,8 @@ */ package com.appirio.service.challengefeeder.manager; -import java.util.ArrayList; import java.util.List; -import com.appirio.service.challengefeeder.api.ChallengeData; -import com.appirio.service.challengefeeder.api.CheckpointPrizeData; -import com.appirio.service.challengefeeder.api.EventData; -import com.appirio.service.challengefeeder.api.FileTypeData; -import com.appirio.service.challengefeeder.api.PhaseData; -import com.appirio.service.challengefeeder.api.PrizeData; -import com.appirio.service.challengefeeder.api.PropertyData; -import com.appirio.service.challengefeeder.api.ResourceData; -import com.appirio.service.challengefeeder.api.ReviewData; -import com.appirio.service.challengefeeder.api.SubmissionData; -import com.appirio.service.challengefeeder.api.TermsOfUseData; -import com.appirio.service.challengefeeder.api.WinnerData; - /** * ChallengeFeederUtil provides common methods such as associating the challenge data. 
* @@ -29,305 +15,15 @@ * * Version 1.2 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Detail Index * - added {@link #getColorStyle(Integer)} + * + * Version 1.3 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - remove the useless methods + * * * @author TCSCODER - * @version 1.2 + * @version 1.3 */ public class ChallengeFeederUtil { - /** - * Associate all checkpointPrizes - * - * @param challenges the challenges to use - * @param checkpointPrizes the checkpointPrizes to use - */ - static void associateAllCheckpointPrizes(List challenges, List checkpointPrizes) { - for (CheckpointPrizeData item : checkpointPrizes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getCheckpointPrizes() == null) { - challenge.setCheckpointPrizes(new ArrayList()); - } - challenge.getCheckpointPrizes().add(item); - break; - } - } - } - for (CheckpointPrizeData item : checkpointPrizes) { - item.setChallengeId(null); - } - } - - /** - * Associate all events - * - * @param challenges the challenges to use - * @param events the events to use - */ - static void associateAllEvents(List challenges, List events) { - for (EventData item : events) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getEvents() == null) { - challenge.setEvents(new ArrayList()); - } - challenge.getEvents().add(item); - break; - } - } - } - for (EventData item : events) { - item.setChallengeId(null); - } - } - - /** - * Associate all fileTypes - * - * @param challenges the challenges to use - * @param fileTypes the fileTypes to use - */ - static void associateAllFileTypes(List challenges, List fileTypes) { - for (FileTypeData item : fileTypes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getFileTypes() == null) { - challenge.setFileTypes(new ArrayList()); - } - challenge.getFileTypes().add(item); - break; - } - } - } - for (FileTypeData item : fileTypes) { - item.setChallengeId(null); - } - } - - /** - * Associate all phases - * - * @param challenges the challenges to use - * @param allPhases the allPhases to use - */ - static void associateAllPhases(List challenges, List allPhases) { - for (PhaseData aPhase : allPhases) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(aPhase.getChallengeId())) { - if (challenge.getPhases() == null) { - challenge.setPhases(new ArrayList()); - } - challenge.getPhases().add(aPhase); - break; - } - } - } - for (PhaseData aPhase : allPhases) { - aPhase.setChallengeId(null); - } - } - - /** - * Associate all prizes - * - * @param challenges the challenges to use - * @param prizes the prizes to use - */ - static void associateAllPrizes(List challenges, List prizes) { - for (PrizeData item : prizes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getPrizes() == null) { - challenge.setPrizes(new ArrayList()); - } - challenge.getPrizes().add(item); - break; - } - } - } - for (PrizeData item : prizes) { - item.setChallengeId(null); - } - } - - /** - * Associate all properties - * - * @param challenges the challenges to use - * @param properties the properties to use - */ - static void associateAllProperties(List challenges, List properties) { - for (PropertyData item : properties) { - for (ChallengeData challenge : challenges) { - if 
(challenge.getId().equals(item.getChallengeId())) { - if (challenge.getProperties() == null) { - challenge.setProperties(new ArrayList()); - } - challenge.getProperties().add(item); - break; - } - } - } - for (PropertyData item : properties) { - item.setChallengeId(null); - } - } - - /** - * Associate all resources - * - * @param challenges the challenges to use - * @param resources the resources to use - */ - static void associateAllResources(List challenges, List resources) { - for (ResourceData item : resources) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getResources() == null) { - challenge.setResources(new ArrayList()); - } - challenge.getResources().add(item); - break; - } - } - } - for (ResourceData item : resources) { - item.setChallengeId(null); - } - } - - /** - * Associate all reviews - * - * @param challenges the challenges to use - * @param reviews the reviews to use - */ - static void associateAllReviews(List challenges, List reviews) { - for (ReviewData item : reviews) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getReviews() == null) { - challenge.setReviews(new ArrayList()); - } - challenge.getReviews().add(item); - break; - } - } - } - for (ReviewData item : reviews) { - item.setChallengeId(null); - } - } - - /** - * Associate all submissions - * - * @param challenges the challenges to use - * @param submissions the submissions to use - */ - static void associateAllSubmissions(List challenges, List submissions) { - for (SubmissionData item : submissions) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getSubmissions() == null) { - challenge.setSubmissions(new ArrayList()); - } - challenge.getSubmissions().add(item); - break; - } - } - } - for (SubmissionData item : submissions) { - item.setChallengeId(null); - } - } - - /** - * Associate all terms of use - * - * @param challenges the challenges to use - * @param termsOfUse the termsOfUse to use - */ - static void associateAllTermsOfUse(List challenges, List termsOfUse) { - for (TermsOfUseData item : termsOfUse) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getTerms() == null) { - challenge.setTerms(new ArrayList()); - } - challenge.getTerms().add(item); - break; - } - } - } - for (TermsOfUseData item : termsOfUse) { - item.setChallengeId(null); - } - } - - /** - * Associate all winners - * - * @param challenges the challenges to use - * @param winners the winners to use - */ - static void associateAllWinners(List challenges, List winners) { - for (WinnerData item : winners) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getWinners() == null) { - challenge.setWinners(new ArrayList()); - } - challenge.getWinners().add(item); - break; - } - } - } - for (WinnerData item : winners) { - item.setChallengeId(null); - } - } - - /** - * List the items as string - * - * @param items the items to use - * @return the String result, separated by comma - */ - static String listAsString(List items) { - StringBuilder res = new StringBuilder(); - for (int i = 0; i < items.size(); ++i) { - res.append(items.get(i).toString()); - if (i < items.size() - 1) { - res.append(", "); - } - } - - return res.toString(); - } - - /** - * Associate all pointPrizes - * - * @param 
challenges the challenges to use - * @param checkpointPrizes the checkpointPrizes to use - */ - static void associateAllPointsPrize(List challenges, List pointPrizes) { - for (PrizeData item : pointPrizes) { - for (ChallengeData challenge : challenges) { - if (challenge.getId().equals(item.getChallengeId())) { - if (challenge.getPointPrizes() == null) { - challenge.setPointPrizes(new ArrayList()); - } - challenge.getPointPrizes().add(item); - break; - } - } - } - for (PrizeData item : pointPrizes) { - item.setChallengeId(null); - } - } - /** * Get color style * @@ -335,7 +31,7 @@ static void associateAllPointsPrize(List challenges, List items) { + StringBuilder res = new StringBuilder(); + for (int i = 0; i < items.size(); ++i) { + res.append(items.get(i).toString()); + if (i < items.size() - 1) { + res.append(", "); + } + } + + return res.toString(); + } } diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingFeederManager.java index b3af792..d14fca6 100644 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingFeederManager.java +++ b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingFeederManager.java @@ -5,7 +5,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Map; @@ -23,7 +22,6 @@ import com.appirio.service.challengefeeder.api.challengelisting.EventData; import com.appirio.service.challengefeeder.api.challengelisting.WinnerData; import com.appirio.service.challengefeeder.config.ChallengeConfiguration; -import com.appirio.service.challengefeeder.dao.ChallengeFeederDAO; import com.appirio.service.challengefeeder.dao.ChallengeListingFeederDAO; import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; import com.appirio.service.challengefeeder.util.JestClientUtils; @@ -40,8 +38,12 @@ * * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - remove the useless dao + * + * * @author TCCoder - * @version 1.0 + * @version 1.1 * */ public class ChallengeListingFeederManager { @@ -64,11 +66,6 @@ public class ChallengeListingFeederManager { * The constant for the design challenge type */ private static final String DESIGN_TYPE = "DESIGN"; - - /** - * DAO to access challenge data from the transactional database. - */ - private final ChallengeFeederDAO challengeFeederDAO; /** * DAO to access challenge data from the transactional database. 
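// ChallengeFeederUtil now keeps only getColorStyle and the re-added listAsString helper above.
// The standalone snippet below only illustrates what the feeder managers do with it: they join
// the ids with ", " and wrap the result in an "in(...)" filter expression (as in the
// "challengeIds=in(...)" and "roundIds=in(...)" strings elsewhere in this diff). The class name
// and the id values here are invented for the example; the stream join mirrors listAsString.
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class ListAsStringExample {
    public static void main(String[] args) {
        List<Long> ids = Arrays.asList(1001L, 1002L, 1003L);        // illustrative ids only
        String joined = ids.stream().map(String::valueOf)
                .collect(Collectors.joining(", "));                  // same output as listAsString(ids)
        String filter = "challengeIds=in(" + joined + ")";           // how the managers build their filters
        System.out.println(filter);                                  // challengeIds=in(1001, 1002, 1003)
    }
}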
@@ -89,14 +86,12 @@ public class ChallengeListingFeederManager { * Create ChallengeListingFeederManager * * @param jestClient the jestClient to use - * @param challengeFeederDAO the challengeFeederDAO to use * @param challengeListingFeederDAO the challengeListingFeederDAO to use * @param challengeConfiguration the challengeConfiguration to use */ - public ChallengeListingFeederManager(JestClient jestClient, ChallengeFeederDAO challengeFeederDAO, ChallengeListingFeederDAO challengeListingFeederDAO, + public ChallengeListingFeederManager(JestClient jestClient, ChallengeListingFeederDAO challengeListingFeederDAO, ChallengeConfiguration challengeConfiguration) { this.jestClient = jestClient; - this.challengeFeederDAO = challengeFeederDAO; this.challengeListingFeederDAO = challengeListingFeederDAO; this.challengeConfiguration = challengeConfiguration; } @@ -148,27 +143,27 @@ public void pushChallengeFeeder(ChallengeFeederParam param) throws SupplyExcepti List events = this.challengeListingFeederDAO.getEventsListing(queryParameter); associateAllEvents(challenges, events); - List phases = this.challengeFeederDAO.getPhases(queryParameter); + List phases = this.challengeListingFeederDAO.getPhases(queryParameter); associateAllPhases(challenges, phases); - List prizes = this.challengeFeederDAO.getPrizes(queryParameter); + List prizes = this.challengeListingFeederDAO.getPrizes(queryParameter); associateAllPrizes(challenges, prizes); - List pointPrizes = this.challengeFeederDAO.getPointsPrize(queryParameter); + List pointPrizes = this.challengeListingFeederDAO.getPointsPrize(queryParameter); associatePointPrizes(challenges, pointPrizes); - List fileTypes = this.challengeFeederDAO.getFileTypes(queryParameter); + List fileTypes = this.challengeListingFeederDAO.getFileTypes(queryParameter); associateAllFileTypes(challenges, fileTypes); List winners = this.challengeListingFeederDAO.getWinnersForChallengeListing(queryParameter); associateAllWinners(challenges, winners); List> checkpointsSubmissions = this.challengeListingFeederDAO.getCheckpointsSubmissions(queryParameter); - List> groupIds = this.challengeFeederDAO.getGroupIds(queryParameter); + List> groupIds = this.challengeListingFeederDAO.getGroupIds(queryParameter); List userIds = this.challengeListingFeederDAO.getChallengeUserIds(queryParameter); associateAllUserIds(challenges, userIds); - List> platforms = this.challengeFeederDAO.getChallengePlagforms(queryParameter); + List> platforms = this.challengeListingFeederDAO.getChallengePlatforms(queryParameter); for (Map item : platforms) { for (ChallengeListingData data : challenges) { if (data.getChallengeId().longValue() == Long.parseLong(item.get("challengeId").toString())) { @@ -179,7 +174,7 @@ public void pushChallengeFeeder(ChallengeFeederParam param) throws SupplyExcepti } } } - List> technoglies = this.challengeFeederDAO.getChallengeTechnologies(queryParameter); + List> technoglies = this.challengeListingFeederDAO.getChallengeTechnologies(queryParameter); for (Map item : technoglies) { for (ChallengeListingData data : challenges) { if (data.getChallengeId().longValue() == Long.parseLong(item.get("challengeId").toString())) { @@ -243,7 +238,7 @@ public void pushChallengeFeeder(ChallengeFeederParam param) throws SupplyExcepti * @return the Date result */ public Date getTimestamp() throws SupplyException { - return this.challengeFeederDAO.getTimestamp().getDate(); + return this.challengeListingFeederDAO.getTimestamp().getDate(); } /** @@ -256,7 +251,7 @@ public List getChangedChallengeIds(Date 
lastRunTimestamp) { if (lastRunTimestamp == null) { throw new IllegalArgumentException("The lastRunTimestamp should be non-null."); } - return this.challengeFeederDAO.getChangedChallengeIds(lastRunTimestamp); + return this.challengeListingFeederDAO.getChangedChallengeIds(lastRunTimestamp); } /** diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingMMFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingMMFeederManager.java index c70e5e7..74b274d 100644 --- a/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingMMFeederManager.java +++ b/src/main/java/com/appirio/service/challengefeeder/manager/ChallengeListingMMFeederManager.java @@ -9,7 +9,6 @@ import com.appirio.service.challengefeeder.api.challengelisting.ChallengeListingData; import com.appirio.service.challengefeeder.api.challengelisting.WinnerData; import com.appirio.service.challengefeeder.dao.ChallengeListingMMFeederDAO; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; import com.appirio.service.challengefeeder.dto.MmFeederParam; import com.appirio.service.challengefeeder.util.JestClientUtils; import com.appirio.supply.SupplyException; @@ -25,7 +24,9 @@ import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; import javax.servlet.http.HttpServletResponse; @@ -34,8 +35,11 @@ * * It's added in Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index For Legacy Marathon Matches v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - remove the useless dao + * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ public class ChallengeListingMMFeederManager { @@ -53,11 +57,6 @@ public class ChallengeListingMMFeederManager { * The constant for the upcoming phase status name */ private static final String PHASE_SCHEDULED = "Scheduled"; - - /** - * DAO to access marathon match data from the transactional database. - */ - private final MmFeederDAO mmFeederDAO; /** * DAO to access marathon match data from the transactional database. 
@@ -78,12 +77,11 @@ public class ChallengeListingMMFeederManager { * Create ChallengeListingMMFeederManager * * @param jestClient the jestClient to use - * @param mmFeederDAO the mmFeederDAO to use * @param challengeListingMmFeederDAO the challengeListingMmFeederDAO to use + * @param forumLinkUrl the forumLinkUrl to use */ - public ChallengeListingMMFeederManager(JestClient jestClient, MmFeederDAO mmFeederDAO, ChallengeListingMMFeederDAO challengeListingMmFeederDAO, String forumLinkUrl) { + public ChallengeListingMMFeederManager(JestClient jestClient, ChallengeListingMMFeederDAO challengeListingMmFeederDAO, String forumLinkUrl) { this.jestClient = jestClient; - this.mmFeederDAO = mmFeederDAO; this.challengeListingMmFeederDAO = challengeListingMmFeederDAO; this.forumLinkUrl = forumLinkUrl; } @@ -106,13 +104,13 @@ public void pushMarathonMatchDataIntoChallenge(MmFeederParam param) throws Suppl DataScienceHelper.checkMissedIds(param, mms); // associate all the data - List phases = this.mmFeederDAO.getPhases(queryParameter); + List phases = this.challengeListingMmFeederDAO.getPhases(queryParameter); associateAllPhases(mms, phases); - List prizes = this.mmFeederDAO.getPrizes(queryParameter); + List prizes = this.challengeListingMmFeederDAO.getPrizes(queryParameter); associateAllPrizes(mms, prizes); - List events = this.mmFeederDAO.getEvents(queryParameter); + List events = this.challengeListingMmFeederDAO.getEvents(queryParameter); associateAllEvents(mms, events); List winners = this.challengeListingMmFeederDAO.getMarathonMatchWinners(queryParameter); @@ -254,7 +252,7 @@ private static void associateAllPhases(List challenges, Li * @return the timestamp result */ public Date getTimestamp() throws SupplyException { - return this.mmFeederDAO.getTimestamp().getDate(); + return this.challengeListingMmFeederDAO.getTimestamp().getDate(); } /** @@ -265,6 +263,6 @@ public Date getTimestamp() throws SupplyException { * @return The list of TCID. */ public List getMatchesWithRegistrationPhaseStartedIds(java.sql.Date date, long lastRunTimestamp) { - return this.mmFeederDAO.getMatchesWithRegistrationPhaseStartedIds(date, lastRunTimestamp); + return this.challengeListingMmFeederDAO.getMatchesWithRegistrationPhaseStartedIds(date, lastRunTimestamp); } } diff --git a/src/main/java/com/appirio/service/challengefeeder/manager/MmFeederManager.java b/src/main/java/com/appirio/service/challengefeeder/manager/MmFeederManager.java deleted file mode 100644 index 9a1aaec..0000000 --- a/src/main/java/com/appirio/service/challengefeeder/manager/MmFeederManager.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright (C) 2018 TopCoder Inc., All Rights Reserved. 
- */ -package com.appirio.service.challengefeeder.manager; - -import com.appirio.service.challengefeeder.Helper; -import com.appirio.service.challengefeeder.api.ChallengeData; -import com.appirio.service.challengefeeder.api.EventData; -import com.appirio.service.challengefeeder.api.PhaseData; -import com.appirio.service.challengefeeder.api.PrizeData; -import com.appirio.service.challengefeeder.api.TermsOfUseData; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; -import com.appirio.service.challengefeeder.dto.MmFeederParam; -import com.appirio.service.challengefeeder.util.JestClientUtils; -import com.appirio.supply.SupplyException; -import com.appirio.supply.constants.SubTrack; -import com.appirio.tech.core.api.v3.*; -import com.appirio.tech.core.api.v3.request.FieldSelector; -import com.appirio.tech.core.api.v3.request.FilterParameter; -import com.appirio.tech.core.api.v3.request.QueryParameter; -import com.appirio.tech.core.auth.AuthUser; - -import io.searchbox.client.JestClient; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.*; - -import javax.servlet.http.HttpServletResponse; - -import java.util.stream.*; - -/** - * MmFeederManager is used to handle the marathon match feeders. - * - * It's added in Topcoder - Populate Marathon Match Related Data Into Challenge Model In Elasticsearch v1.0 - * - * Version 1.1 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Listing Index For Legacy Marathon Matches v1.0 - * - move the common validate methods to the DataScienceHelper class - * - * Version 1.2 - Topcoder ElasticSearch Feeder Service - Way To Populate Challenge-Detail Index For Legacy Marathon Matches v1.0 - * - move the common validation logic to the DataScienceHelper - * - * - * @author TCSCODER - * @version 1.2 - */ -public class MmFeederManager { - - /** - * Logger used to log events - */ - private static final Logger logger = LoggerFactory.getLogger(MmFeederManager.class); - - - /** - * DAO to access marathon match data from the transactional database. - */ - private final MmFeederDAO mmFeederDAO; - - - /** - * The jestClient field - */ - private final JestClient jestClient; - - /** - * Create MmFeederManager - * - * @param jestClient the jestClient to use - * @param mmFeederDAO the mmFeederDAO to use - */ - public MmFeederManager(JestClient jestClient, MmFeederDAO mmFeederDAO) { - this.jestClient = jestClient; - this.mmFeederDAO = mmFeederDAO; - } - - /** - * Push marathon match data into challenge model in elasticsearch. - * - * @param authUser the authUser to use - * @param param the param to use - * @throws SupplyException if any error occurs - */ - public void pushMarathonMatchDataIntoChallenge(AuthUser authUser, MmFeederParam param) throws SupplyException { - logger.info("Enter of pushMarathonMatchDataIntoChallenge"); - Helper.checkAdmin(authUser); - pushMarathonMatchDataIntoChallenge(param); - } - - /** - * Push marathon match data into challenge model in elasticsearch. 
- * - * @param param the param to use - * @throws SupplyException if any error occurs - */ - public void pushMarathonMatchDataIntoChallenge(MmFeederParam param) throws SupplyException { - logger.info("Enter of pushMarathonMatchDataIntoChallenge"); - DataScienceHelper.checkMarathonFeederParam(param, "challenges"); - - FilterParameter filter = new FilterParameter("roundIds=in(" + ChallengeFeederUtil.listAsString(param.getRoundIds()) + ")"); - QueryParameter queryParameter = new QueryParameter(new FieldSelector()); - queryParameter.setFilter(filter); - List mms = this.mmFeederDAO.getMarathonMatches(queryParameter); - - //set legacy mm subtrach to MARATHON_MATCH - mms.forEach(c -> { - if (c.getIsLegacy()) { - c.setSubTrackFromEnum(SubTrack.MARATHON_MATCH); - c.setIsBanner(false); - } - c.setIsTask(false); - c.setRoundId(c.getId()); - if (c.getNumSubmissions() == null) c.setNumSubmissions(0L); - if (c.getTotalPrize() == null) c.setTotalPrize(0.0); - }); - - DataScienceHelper.checkMissedIds(param, mms); - - //filter isLegacy, if set up - if (param.getLegacy() != null) { - mms = mms.stream().filter(c -> c.getIsLegacy().equals(param.getLegacy())).collect(Collectors.toList()); - } - - // associate all the data - List phases = this.mmFeederDAO.getPhases(queryParameter); - for (PhaseData data : phases) { - if (data.getActualStartTime() != null && data.getActualEndTime() != null) { - data.setDuration((data.getActualEndTime().getTime() - data.getActualStartTime().getTime()) / 1000); - } - } - ChallengeFeederUtil.associateAllPhases(mms, phases); - - List prizes = this.mmFeederDAO.getPrizes(queryParameter); - ChallengeFeederUtil.associateAllPrizes(mms, prizes); -// exclude this for now -// List submissions = this.mmFeederDAO.getSubmissions(queryParameter); -// ChallengeFeederUtil.associateAllSubmissions(mms, submissions); - - List termsOfUse = this.mmFeederDAO.getTerms(queryParameter); - ChallengeFeederUtil.associateAllTermsOfUse(mms, termsOfUse); - - List events = this.mmFeederDAO.getEvents(queryParameter); - ChallengeFeederUtil.associateAllEvents(mms, events); -// exclude this for now -// List resources = this.mmFeederDAO.getResources(queryParameter); -// // set the user ids before associating the resources as the associate method will set the challenge id to null -// for (ChallengeData data : mms) { -// for (ResourceData resourceData : resources) { -// if (data.getId().longValue() == resourceData.getChallengeId().longValue()) { -// if (data.getUserIds() == null) { -// data.setUserIds(new ArrayList()); -// } -// data.getUserIds().add(resourceData.getUserId()); -// } -// } -// } -// ChallengeFeederUtil.associateAllResources(mms, resources); - - try { - JestClientUtils.pushFeeders(jestClient, param, mms); - } catch (IOException ioe) { - SupplyException se = new SupplyException("Internal server error occurs", ioe); - se.setStatusCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - throw se; - } - } - - /** - * Get current timestamp from the database. - * - * @throws SupplyException if any error occurs - * @return the timestamp result - */ - public Date getTimestamp() throws SupplyException { - return this.mmFeederDAO.getTimestamp().getDate(); - } - - /** - * Get the marathon matches whose registration phase started after the specified date and after the last run timestamp. - * - * @param date The date param. - * @param lastRunTimestamp The last run timestamp. - * @return The list of TCID. 
- */ - public List getMatchesWithRegistrationPhaseStartedIds(java.sql.Date date, long lastRunTimestamp) { - return this.mmFeederDAO.getMatchesWithRegistrationPhaseStartedIds(date, lastRunTimestamp); - } -} diff --git a/src/main/java/com/appirio/service/challengefeeder/resources/ChallengeFeederResource.java b/src/main/java/com/appirio/service/challengefeeder/resources/ChallengeFeederResource.java index 5b31fed..5c41b69 100644 --- a/src/main/java/com/appirio/service/challengefeeder/resources/ChallengeFeederResource.java +++ b/src/main/java/com/appirio/service/challengefeeder/resources/ChallengeFeederResource.java @@ -4,10 +4,8 @@ package com.appirio.service.challengefeeder.resources; import com.appirio.service.challengefeeder.dto.ChallengeFeederParam; -import com.appirio.service.challengefeeder.manager.ChallengeFeederManager; import com.appirio.service.supply.resources.MetadataApiResponseFactory; import com.appirio.supply.ErrorHandler; -import com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.request.PostPutRequest; import com.appirio.tech.core.api.v3.request.annotation.AllowAnonymous; import com.appirio.tech.core.api.v3.response.ApiResponse; @@ -16,7 +14,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.PUT; @@ -27,9 +24,13 @@ /** * Resource to handle the challenge feeder * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - make it dummy + * + * * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @@ -41,18 +42,10 @@ public class ChallengeFeederResource { */ private static final Logger logger = LoggerFactory.getLogger(ChallengeFeederResource.class); - /** - * Manager to access search business logic - */ - private final ChallengeFeederManager challengeFeederManager; - /** * Create ChallengeFeederResource - * - * @param challengeFeederManager the challengeManager to use */ - public ChallengeFeederResource(ChallengeFeederManager challengeFeederManager) { - this.challengeFeederManager = challengeFeederManager; + public ChallengeFeederResource() { } /** @@ -66,10 +59,6 @@ public ChallengeFeederResource(ChallengeFeederManager challengeFeederManager) { @AllowAnonymous public ApiResponse pushChallengeFeeders(@Valid PostPutRequest request) { try { - if (request == null || request.getParam() == null) { - throw new SupplyException("The request body should be provided", HttpServletResponse.SC_BAD_REQUEST); - } - this.challengeFeederManager.pushChallengeFeeder(request.getParam()); return MetadataApiResponseFactory.createResponse(null); } catch (Exception e) { return ErrorHandler.handle(e, logger); diff --git a/src/main/java/com/appirio/service/challengefeeder/resources/MmFeederResource.java b/src/main/java/com/appirio/service/challengefeeder/resources/MmFeederResource.java index e9d8076..cc7802a 100644 --- a/src/main/java/com/appirio/service/challengefeeder/resources/MmFeederResource.java +++ b/src/main/java/com/appirio/service/challengefeeder/resources/MmFeederResource.java @@ -4,10 +4,8 @@ package com.appirio.service.challengefeeder.resources; import com.appirio.service.challengefeeder.dto.MmFeederParam; -import com.appirio.service.challengefeeder.manager.MmFeederManager; import com.appirio.service.supply.resources.MetadataApiResponseFactory; import com.appirio.supply.ErrorHandler; -import 
com.appirio.supply.SupplyException; import com.appirio.tech.core.api.v3.request.PostPutRequest; import com.appirio.tech.core.api.v3.response.ApiResponse; import com.appirio.tech.core.auth.AuthUser; @@ -17,7 +15,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.PUT; @@ -30,9 +27,13 @@ * * It's added in Topcoder - Populate Marathon Match Related Data Into Challenge Model In Elasticsearch v1.0 * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - make it dummy + * + * * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @@ -44,18 +45,12 @@ public class MmFeederResource { */ private static final Logger logger = LoggerFactory.getLogger(MmFeederResource.class); - /** - * Manager to handle the marathon match feeders - */ - private final MmFeederManager mmFeederManager; /** * Create MmFeederResource * - * @param mmFeederManager the mmFeederManager to use */ - public MmFeederResource(MmFeederManager mmFeederManager) { - this.mmFeederManager = mmFeederManager; + public MmFeederResource() { } /** @@ -70,10 +65,6 @@ public MmFeederResource(MmFeederManager mmFeederManager) { @Timed public ApiResponse pushMarathonMatchDataIntoChallenge(@Auth AuthUser user, @Valid PostPutRequest request) { try { - if (request == null || request.getParam() == null) { - throw new SupplyException("The request body should be provided", HttpServletResponse.SC_BAD_REQUEST); - } - this.mmFeederManager.pushMarathonMatchDataIntoChallenge(user, request.getParam()); return MetadataApiResponseFactory.createResponse(null); } catch (Exception e) { return ErrorHandler.handle(e, logger); diff --git a/src/main/java/com/appirio/service/resourcefactory/ChallengeFeederFactory.java b/src/main/java/com/appirio/service/resourcefactory/ChallengeFeederFactory.java index b669b44..443de80 100644 --- a/src/main/java/com/appirio/service/resourcefactory/ChallengeFeederFactory.java +++ b/src/main/java/com/appirio/service/resourcefactory/ChallengeFeederFactory.java @@ -1,19 +1,20 @@ package com.appirio.service.resourcefactory; -import com.appirio.service.challengefeeder.dao.ChallengeFeederDAO; -import com.appirio.service.challengefeeder.manager.ChallengeFeederManager; import com.appirio.service.challengefeeder.resources.ChallengeFeederResource; import com.appirio.service.supply.resources.ResourceFactory; -import com.appirio.supply.DAOFactory; import com.appirio.supply.SupplyException; import io.searchbox.client.JestClient; /** * Factory for ChallengeFeederResource + * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - create dummy resource + * * * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ public class ChallengeFeederFactory implements ResourceFactory { @@ -39,8 +40,6 @@ public ChallengeFeederFactory(JestClient jestClient) { */ @Override public ChallengeFeederResource getResourceInstance() throws SupplyException { - final ChallengeFeederManager challengeManager = new ChallengeFeederManager(jestClient, DAOFactory.getInstance().createDAO(ChallengeFeederDAO.class)); - - return new ChallengeFeederResource(challengeManager); + return new ChallengeFeederResource(); } } diff --git a/src/main/java/com/appirio/service/resourcefactory/MmFeederResourceFactory.java b/src/main/java/com/appirio/service/resourcefactory/MmFeederResourceFactory.java 
index 9c51ef5..f453757 100644 --- a/src/main/java/com/appirio/service/resourcefactory/MmFeederResourceFactory.java +++ b/src/main/java/com/appirio/service/resourcefactory/MmFeederResourceFactory.java @@ -3,11 +3,8 @@ */ package com.appirio.service.resourcefactory; -import com.appirio.service.challengefeeder.dao.MmFeederDAO; -import com.appirio.service.challengefeeder.manager.MmFeederManager; import com.appirio.service.challengefeeder.resources.MmFeederResource; import com.appirio.service.supply.resources.ResourceFactory; -import com.appirio.supply.DAOFactory; import com.appirio.supply.SupplyException; import io.searchbox.client.JestClient; @@ -15,9 +12,13 @@ * Factory for MmFeederResource * * It's added in Topcoder - Populate Marathon Match Related Data Into Challenge Model In Elasticsearch v1.0 + * + * Version 1.1 - Topcoder Elasticsearch Feeder Service - Jobs Cleanup And Improvement v1.0 + * - create dummy resource + * * * @author TCSCODER - * @version 1.0 + * @version 1.1 */ public class MmFeederResourceFactory implements ResourceFactory { @@ -43,7 +44,6 @@ public MmFeederResourceFactory(JestClient jestClient) { */ @Override public MmFeederResource getResourceInstance() throws SupplyException { - final MmFeederManager mmFeederManager = new MmFeederManager(jestClient, DAOFactory.getInstance().createDAO(MmFeederDAO.class)); - return new MmFeederResource(mmFeederManager); + return new MmFeederResource(); } } \ No newline at end of file diff --git a/src/main/resources/elasticsearch-feeder-service.yaml b/src/main/resources/elasticsearch-feeder-service.yaml index 85939b7..59597d7 100644 --- a/src/main/resources/elasticsearch-feeder-service.yaml +++ b/src/main/resources/elasticsearch-feeder-service.yaml @@ -50,45 +50,42 @@ challengeConfiguration: studioForumsUrlPrefix: https://studio.topcoder.com/forums/?module=Category&categoryID= developForumsUrlPrefix: https://apps.topcoder.com/forums/?module=Category&categoryID= directProjectLink: https://www.topcoder.com/direct/contest/detail.action?projectId= - -redissonConfiguration: - forceInitialLoad: "${REDISSON_JOB_FORCE_INITIAL_LOAD:-false}" - challengesIndex: "${REDISSON_JOB_CHALLENGES_INDEX:-challenges}" - mmIndex: "${REDISSON_JOB_MMACHES_INDEX:-mmatches}" - srmsIndex: "${REDISSON_JOB_SRMS_INDEX:-srms}" - challengesType: "${REDISSON_JOB_CHALLENGES_TYPE:-challenges}" - mmType: "${REDISSON_JOB_MMATCHES_TYPE:-mmatches}" - srmsType: "${REDISSON_JOB_SRMS_TYPE:-srms}" - challengesListingIndex: "${REDISSON_JOB_CHALLENGES_LISTING_INDEX:-challengeslisting}" - challengesListingType: "${REDISSON_JOB_CHALLENGES_LISTING_TYPE:-challenges}" - challengesDetailIndex: "${REDISSON_JOB_CHALLENGES_DETAIL_INDEX:-challengesdetail}" - challengesDetailType: "${REDISSON_JOB_CHALLENGES_DETAIL_TYPE:-challenges}" - batchUpdateSize: "${REDISSON_JOB_BATCH_UPDATE_SIZE:-1}" - loadChangedChallengesJobLastRunTimestampPrefix: "${REDISSON_LOAD_CHANGED_CHALLENGES_JOB_LAST_RUN_TIMESTAMP_PREFIX:-loadChangedChallengesJobLastRunTimestamp}" - loadChangedChallengesListingJobLastRunTimestampPrefix: "${REDISSON_LOAD_CHANGED_CHALLENGES_LISTING_JOB_LAST_RUN_TIMESTAMP_PREFIX:-loadChangedChallengesListingJobLastRunTimestamp}" - loadChangedChallengesDetailJobLastRunTimestampPrefix: "${REDISSON_LOAD_CHANGED_CHALLENGES_DETAIL_JOB_LAST_RUN_TIMESTAMP_PREFIX:-loadChangedChallengesDetailJobLastRunTimestamp}" - legacyMMToChallengeListingJobLastRunTimestampPrefix: "${REDISSON_LOAD_CHANGED_MM_CHALLENGE_LISTING_JOB_LAST_RUN_TIMESTAMP_PREFIX:-legacyMMToChallengeListingJobLastRunTimestampPrefix}" - 
marathonMatchesJobLastRunTimestampPrefix: "${REDISSON_MM_JOB_LAST_RUN_TIMESTAMP_PREFIX:-marathonMatchesJobLastRunTimestamp}" - singleRoundMatchesJobLastRunTimestampPrefix: "${REDISSON_SRM_JOB_LAST_RUN_TIMESTAMP_PREFIX:-singleRoundMatchesJobLastRunTimestamp}" - legacyMMJobLastRunTimestampPrefix: "${REDISSON_SRM_JOB_LAST_RUN_TIMESTAMP_PREFIX:-legacyMMJobLastRunTimestamp}" - loadChangedMMChallengeDetailJobLastRunTimestampPrefix: "${REDISSON_LOAD_CHANGED_MM_CHALLENGE_DETAIL_JOB_LAST_RUN_TIMESTAMP_PREFIX:-loadChangedMMChallengeDetailJobLastRunTimestamp}" - loadChangedMMChallengeDetailJobLockerKeyName: "${REDISSON_LOAD_CHANGED_MM_CHALLENGE_DETAIL_JOB_LOCK_KEY:-loadChangedMMChallengeDetailJobLockKey}" - loadChangedChallengesJobLockerKeyName: "${REDISSON_LOAD_CHANGED_CHALLENGES_JOB_LOCK_KEY:-loadChangedChallengesJobLockKey}" - loadChangedChallengesListingJobLockerKeyName: "${REDISSON_LOAD_CHANGED_CHALLENGES_LISTING_JOB_LOCK_KEY:-loadChangedChallengesListingJobLockKey}" - loadChangedChallengesDetailJobLockerKeyName: "${REDISSON_LOAD_CHANGED_CHALLENGES_DETAIL_JOB_LOCK_KEY:-loadChangedChallengesDetailJobLockKey}" - marathonMatchesJobLockerKeyName: "${REDISSON_MM_JOB_LOCK_KEY:-marathonMatchesJobLockerKey}" - singleRoundMatchesJobLockerKeyName: "${REDISSON_SRM_JOB_LOCK_KEY:-singleRoundMatchesJobLockerKey}" - legacyMMJobLockerKeyName: "${REDISSON_LEGACY_MM_JOB_LOCK_KEY:-legacyMMJobLockerKey}" - legacyMMToChallengeListingJobLockerKeyName: "${REDISSON_LEGACY_MM_TO_CHALLENGE_LISTING_JOB_LOCK_KEY:-legacyMMToChallengeListingJobLockerKey}" - lockWatchdogTimeout: "${REDISSON_JOB_LOCK_WATCHDOG_TIMEOUT:-30000}" - singleServerAddress: "${REDISSON_JOB_SINGLE_SERVER_ADDRESS:-redis://cockpit.cloud.topcoder.com:6379}" - clusterEnabled: "${REDISSON_JOB_CLUSTER_ENABLED:-false}" - nodeAddresses: - - "${REDISSON_JOB_NODE1:-redis://localhost:7001}" - # number of last x days for the jobs - marathonMatchesDaysToSubtract: -180 - singleRoundMatchesDaysToSubtract: -60 - marathonMatchesForumUrl: "https://apps.topcoder.com/forums/?module=ThreadList&forumID=" + + + +jobsConfiguration: + redissonConfiguration: + lockWatchdogTimeout: "${REDISSON_JOB_LOCK_WATCHDOG_TIMEOUT:-30000}" + singleServerAddress: "${REDISSON_JOB_SINGLE_SERVER_ADDRESS:-redis://cockpit.cloud.topcoder.com:6379}" + clusterEnabled: "${REDISSON_JOB_CLUSTER_ENABLED:-false}" + nodeAddresses: + - "${REDISSON_JOB_NODE1:-redis://localhost:7001}" + loadChangedChallengesListingJob: + indexName: "${REDISSON_JOB_LOAD_CHANGED_CHALLENGES_LISTING_JOB_INDEX_NAME:-challengeslisting}" + batchUpdateSize: "${REDISSON_JOB_LOAD_CHANGED_CHALLENGES_LISTING_JOB_BATCH_UPDATE_SIZE:-50}" + loadChangedChallengesDetailJob: + indexName: "${REDISSON_JOB_LOAD_CHANGED_CHALLENGES_DETAIL_JOB_INDEX_NAME:-challengesdetail}" + batchUpdateSize: "${REDISSON_JOB_LOAD_CHANGED_CHALLENGES_DETAIL_JOB_BATCH_UPDATE_SIZE:-50}" + legacyMMToChallengeListingJob: + indexName: "${REDISSON_JOB_LEGACY_MM_TO_CHALLENGE_LISTING_JOB_INDEX_NAME:-challengeslisting}" + batchUpdateSize: "${REDISSON_JOB_LEGACY_MM_TO_CHALLENGE_LISTING_JOB_BATCH_UPDATE_SIZE:-1}" + marathonMatchesDaysToSubtract: -180 + marathonMatchesForumUrl: "https://apps.topcoder.com/forums/?module=ThreadList&forumID=" + loadChangedMMChallengeDetailJob: + indexName: "${REDISSON_JOB_LOAD_CHANGED_MM_CHALLENGE_DETAIL_JOB_INDEX_NAME:-challengesdetail}" + batchUpdateSize: "${REDISSON_JOB_LOAD_CHANGED_MM_CHALLENGE_DETAIL_JOB_BATCH_UPDATE_SIZE:-1}" + marathonMatchesDaysToSubtract: -180 + marathonMatchesForumUrl: 
"https://apps.topcoder.com/forums/?module=ThreadList&forumID=" + marathonMatchesJob: + indexName: "${REDISSON_JOB_MARATHON_MATCHES_JOB_INDEX_NAME:-mmatches}" + batchUpdateSize: "${REDISSON_JOB_MARATHON_MATCHES_JOB_BATCH_UPDATE_SIZE:-1}" + # number of last x days for the jobs + marathonMatchesDaysToSubtract: -180 + marathonMatchesForumUrl: "https://apps.topcoder.com/forums/?module=ThreadList&forumID=" + singleRoundMatchesJob: + indexName: "${REDISSON_JOB_SINGLE_ROUND_MATCHES_JOB_INDEX_NAME:-srms}" + batchUpdateSize: "${REDISSON_JOB_SINGLE_ROUND_MATCHES_JOB_BATCH_UPDATE_SIZE:-1}" + singleRoundMatchesDaysToSubtract: -60 jobs: com.appirio.service.challengefeeder.job.LoadChangedMMChallengeDetailJob: "${LOAD_LEGACY_MARATHON_MATCHES_INTO_CHALLENGE_DETAILS_JOB_INTERVAL:-30s}" diff --git a/src/main/resources/sql/challenge-feeder/get_challenges.properties b/src/main/resources/sql/challenge-feeder/get_challenges.properties deleted file mode 100644 index 88f7284..0000000 --- a/src/main/resources/sql/challenge-feeder/get_challenges.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=p.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_challenges.sql b/src/main/resources/sql/challenge-feeder/get_challenges.sql deleted file mode 100644 index 6989766..0000000 --- a/src/main/resources/sql/challenge-feeder/get_challenges.sql +++ /dev/null @@ -1,123 +0,0 @@ -SELECT - p.create_user AS createdby, - p.create_date AS createdat, - p.modify_user AS updatedby, - p.modify_date AS updatedat, - p.project_id _id, - p.project_id AS id, - CASE WHEN rel_flag.value = 'true' THEN rel_cost.value::decimal ELSE NULL END AS reliabilityBonus, - CASE WHEN dr_elligible.value = 'On' THEN dr_points.value::decimal ELSE NULL END AS drPoints, - env.value AS environment, - code_repo.value AS codeRepo, -/* pspec.detailed_requirements_text AS softwareDetailRequirements, - pspec.final_submission_guidelines_text AS softwareFinalSubmissionGuidelines, - pspec.private_description_text AS copilotDetailRequirements, - pss.contest_description_text AS studioDetailRequirements, - pss.contest_introduction AS introduction, - pss.round_one_introduction AS round1Introduction, - pss.round_two_introduction AS round2Introduction, - pmm_spec.match_details AS marathonMatchDetailRequirements, - pmm_spec.match_rules AS marathonMatchRules, -*/ - pn.value AS name, - CASE - WHEN (ptl.description = 'Application') THEN 'DEVELOP' - WHEN (ptl.description = 'Component') THEN 'DEVELOP' - WHEN (ptl.description = 'Studio') THEN 'DESIGN' - ELSE 'GENERIC' - END AS track, - pcl.description AS subTrack, - pstatus.NAME AS status, - Technology_list(pi1.value) AS technologies, - Platform_list(p.project_id) AS platforms, - Nvl(pp1.actual_start_time, pp1.scheduled_start_time) AS registrationStartDate, - Nvl(pp1.actual_end_time, pp1.scheduled_end_time) AS registrationEndDate, - Nvl(pp2.actual_end_time, pp2.scheduled_end_time) AS submissionEndDate, - review_type_info.value AS reviewType, - forum_id_info.value AS forumId, - (SELECT Count(*) - FROM submission s1 - INNER JOIN upload u1 ON s1.upload_id = u1.upload_id - WHERE u1.project_id = p.project_id - AND s1.submission_type_id = 1 - AND s1.submission_status_id <> 5) AS numSubmissions, - (SELECT Count(*) - FROM RESOURCE r - WHERE r.project_id = p.project_id - AND r.resource_role_id = 1) AS numregistrants, - Nvl(pp15.actual_end_time, pp15.scheduled_end_time) AS checkpointSubmissionEndDate, - Nvl( - (SELECT 
Sum(pr.number_of_submissions) - FROM prize pr - WHERE pr.project_id = p.project_id - AND pr.prize_type_id = 14), 0) AS numberOfCheckpointPrizes, - (SELECT Sum(prize_amount*number_of_submissions) - FROM prize pr - WHERE pr.project_id = p.project_id - AND pr.prize_type_id = 14) AS totalCheckPointPrize, - Nvl( - (SELECT sum(prize_amount) - FROM prize pr - WHERE pr.project_id = p.project_id - AND pr.prize_type_id = 15),0) AS totalPrize, - Nvl( - (SELECT Cast('t' AS BOOLEAN) - FROM contest_eligibility - WHERE contest_id = p.project_id - AND contest_id NOT IN - (SELECT contest_id - FROM contest_eligibility - GROUP BY contest_id - HAVING Count(*) > 1)), Cast('f' AS BOOLEAN)) AS isPrivate, - p.tc_direct_project_id AS directProjectId, - tcdirect.NAME AS directProjectName, - pvs.value As submissionViewable, - (SELECT CASE - WHEN t.count > 0 THEN Cast('t' AS BOOLEAN) - ELSE Cast('f' AS BOOLEAN) - END - FROM - (SELECT count(*) AS COUNT - FROM project_info pti - WHERE pti.project_id = p.project_id - AND pti.project_info_type_id = 82 - AND pti.value = '1') AS t) AS isTask, - (pi87.value = 'Banner') AS isBanner, - pi56.value::Decimal As roundId - FROM project p - INNER JOIN project_status_lu pstatus ON pstatus.project_status_id = p.project_status_id - INNER JOIN project_category_lu pcl ON pcl.project_category_id = p.project_category_id - INNER JOIN project_type_lu ptl ON ptl.project_type_id = pcl.project_type_id - INNER JOIN project_phase pp1 ON pp1.project_id = p.project_id - AND pp1.phase_type_id = 1 - INNER JOIN project_phase pp2 ON pp2.project_id = p.project_id - AND pp2.phase_type_id = 2 - INNER JOIN project_info pn ON pn.project_id = p.project_id - AND pn.project_info_type_id = 6 - INNER JOIN project_info pi1 ON pi1.project_id = p.project_id - AND pi1.project_info_type_id = 1 - LEFT JOIN TCS_CATALOG\:project_info AS forum_id_info ON forum_id_info.project_id = p.project_id - AND forum_id_info.project_info_type_id = 4 - LEFT JOIN TCS_CATALOG\:project_info AS review_type_info ON review_type_info.project_id = p.project_id - AND review_type_info.project_info_type_id = 79 - LEFT JOIN project_phase pp15 ON pp15.project_id = p.project_id - AND pp15.phase_type_id = 15 - LEFT JOIN project_info pidr ON pidr.project_id = p.project_id - AND pidr.project_info_type_id = 26 - LEFT JOIN CORPORATE_OLTP\:tc_direct_project AS tcdirect ON p.tc_direct_project_id = tcdirect.project_id - LEFT JOIN project_info pvs ON pvs.project_id = p.project_id - AND pvs.project_info_type_id = 53 - LEFT JOIN project_info rel_flag ON rel_flag.project_id = p.project_id AND rel_flag.project_info_type_id = 45 - LEFT JOIN project_info rel_cost ON rel_cost.project_id = p.project_id AND rel_cost.project_info_type_id = 38 - LEFT JOIN project_info dr_points ON dr_points.project_id = p.project_id AND dr_points.project_info_type_id = 30 - LEFT JOIN project_info dr_elligible ON dr_elligible.project_id = p.project_id AND dr_elligible.project_info_type_id = 26 - LEFT JOIN project_info env ON env.project_id = p.project_id AND env.project_info_type_id = 84 - LEFT JOIN project_info code_repo ON code_repo.project_id = p.project_id AND code_repo.project_info_type_id = 85 -/* LEFT JOIN project_spec pspec ON pspec.project_id = p.project_id AND pspec.version = (select MAX(project_spec.version) from project_spec where project_spec.project_id = p.project_id) - LEFT JOIN project_studio_specification pss ON pss.project_studio_spec_id = p.project_studio_spec_id - LEFT JOIN project_mm_specification pmm_spec ON pmm_spec.project_mm_spec_id = p.project_mm_spec_id -*/ 
- LEFT JOIN project_info pi87 ON pi87.project_id = p.project_id AND pi87.project_info_type_id = 87 - LEFT JOIN project_info pi56 ON pi56.project_id = p.project_id AND pi56.project_info_type_id = 56 - WHERE pcl.project_category_id NOT IN (27) - AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.properties b/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.properties deleted file mode 100644 index b84a1b8..0000000 --- a/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=prize.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.sql b/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.sql deleted file mode 100644 index 7cbed41..0000000 --- a/src/main/resources/sql/challenge-feeder/get_checkpoint_prizes.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - prize.prize_id as prizeId, - prize.place as place, - prize.prize_amount as amount, - prize.number_of_submissions as numberOfSubmissions, - prize.project_id as challengeId - FROM - prize AS prize - INNER JOIN project AS project ON prize.project_id = project.project_id - WHERE prize.prize_type_id = 14 and {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_events.properties b/src/main/resources/sql/challenge-feeder/get_events.properties deleted file mode 100644 index ab0e858..0000000 --- a/src/main/resources/sql/challenge-feeder/get_events.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=x.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_events.sql b/src/main/resources/sql/challenge-feeder/get_events.sql deleted file mode 100644 index c25b1c3..0000000 --- a/src/main/resources/sql/challenge-feeder/get_events.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - x.project_id AS challengeId, - e.event_id AS eventId, - e.event_short_desc AS eventName - FROM - common_oltp\:event e, - contest_project_xref x, - contest c - WHERE e.event_id = c.event_id and c.contest_id = x.contest_id - AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_mm_contest.properties b/src/main/resources/sql/challenge-feeder/get_mm_contest.properties deleted file mode 100644 index e3a9f27..0000000 --- a/src/main/resources/sql/challenge-feeder/get_mm_contest.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.roundIds.template=rr.round_id in () -filter.roundIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_mm_contest.sql b/src/main/resources/sql/challenge-feeder/get_mm_contest.sql deleted file mode 100644 index c4909d8..0000000 --- a/src/main/resources/sql/challenge-feeder/get_mm_contest.sql +++ /dev/null @@ -1,6 +0,0 @@ -select rr.round_id as roundId, - rr.contest_id as contestId, - rc.component_id as componentId -from informixoltp\:round rr - left join informixoltp\:round_component as rc on rc.round_id = rr.round_id -where {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_properties.properties b/src/main/resources/sql/challenge-feeder/get_properties.properties deleted file mode 100644 index 93dd720..0000000 --- a/src/main/resources/sql/challenge-feeder/get_properties.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=pi.project_id 
in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_properties.sql b/src/main/resources/sql/challenge-feeder/get_properties.sql deleted file mode 100644 index fa73976..0000000 --- a/src/main/resources/sql/challenge-feeder/get_properties.sql +++ /dev/null @@ -1,8 +0,0 @@ -SELECT - pi.project_id as challengeId, - pi.project_info_type_id as propertyId, - pit.name as name, - pi.value as value - FROM project_info pi - INNER JOIN project_info_type_lu pit ON pit.project_info_type_id = pi.project_info_type_id - WHERE {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_resources.properties b/src/main/resources/sql/challenge-feeder/get_resources.properties deleted file mode 100644 index 57321c4..0000000 --- a/src/main/resources/sql/challenge-feeder/get_resources.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=r.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_resources.sql b/src/main/resources/sql/challenge-feeder/get_resources.sql deleted file mode 100644 index 513e8d6..0000000 --- a/src/main/resources/sql/challenge-feeder/get_resources.sql +++ /dev/null @@ -1,17 +0,0 @@ -SELECT - r.project_id AS challengeId, - r.resource_id AS resourceId, - r.user_id AS userId, - rrl.name AS role, - r.project_phase_id AS projectPhaseId, - ri_handle.value AS handle, - ri_reg_date.value AS registrationDate, - decode(ri_rating.value, 'N/A', '0', ri_rating.value)::int AS rating, - ri_reliability.value::int AS reliability - FROM resource r - INNER JOIN resource_role_lu rrl ON r.resource_role_id = rrl.resource_role_id - INNER JOIN resource_info ri_handle ON ri_handle.resource_id = r.resource_id and ri_handle.resource_info_type_id = 2 - INNER JOIN resource_info ri_reg_date ON ri_reg_date.resource_id = r.resource_id and ri_reg_date.resource_info_type_id = 6 - LEFT JOIN resource_info ri_rating ON ri_rating.resource_id = r.resource_id and ri_rating.resource_info_type_id = 4 - LEFT JOIN resource_info ri_reliability ON ri_reliability.resource_id = r.resource_id and ri_reliability.resource_info_type_id = 5 - WHERE {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_reviews.properties b/src/main/resources/sql/challenge-feeder/get_reviews.properties deleted file mode 100644 index e77b5e0..0000000 --- a/src/main/resources/sql/challenge-feeder/get_reviews.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=pp.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_reviews.sql b/src/main/resources/sql/challenge-feeder/get_reviews.sql deleted file mode 100644 index 244610a..0000000 --- a/src/main/resources/sql/challenge-feeder/get_reviews.sql +++ /dev/null @@ -1,19 +0,0 @@ -SELECT - pp.project_id as challengeId, - r.review_id as reviewId, - r.submission_id as submissionId, - r.initial_score as initialScore, - r.score, - submitter.user_id as submitterId, - submitter.handle as submitterHandle, - reviewer.user_id as reviewerId, - reviewer.handle as reviewerHandle - FROM review r - INNER JOIN submission s ON r.submission_id = s.submission_id - INNER JOIN upload u ON s.upload_id = u.upload_id - INNER JOIN resource rs_submitter ON u.resource_id = rs_submitter.resource_id AND rs_submitter.resource_role_id = 1 - INNER JOIN project_phase pp ON pp.project_phase_id = r.project_phase_id - INNER 
JOIN user submitter ON rs_submitter.user_id = submitter.user_id - INNER JOIN resource rs_reviewer ON r.resource_id = rs_reviewer.resource_id - INNER JOIN user reviewer ON rs_reviewer.user_id = reviewer.user_id - WHERE r.committed = 1 AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_submissions.properties b/src/main/resources/sql/challenge-feeder/get_submissions.properties deleted file mode 100644 index 8234dd0..0000000 --- a/src/main/resources/sql/challenge-feeder/get_submissions.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=u.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_submissions.sql b/src/main/resources/sql/challenge-feeder/get_submissions.sql deleted file mode 100644 index c3de3b1..0000000 --- a/src/main/resources/sql/challenge-feeder/get_submissions.sql +++ /dev/null @@ -1,24 +0,0 @@ -SELECT - u.project_id as challengeId, - s.submission_id as submissionId, - s.create_date as submittedAt, - ssl.name as status, - s.placement as placement, - stl.name as submissionType, - s.create_user as submitterId, - usr.handle as submitter, - s.screening_score as screeningScore, - s.initial_score as initialScore, - s.final_score as finalScore - FROM - submission s, upload u, submission_status_lu ssl, submission_type_lu stl, user usr - WHERE - u.upload_id = s.upload_id - AND s.create_user = usr.user_id - AND s.submission_status_id = ssl.submission_status_id - AND s.submission_type_id = stl.submission_type_id - AND s.submission_status_id <> 5 - AND s.submission_type_id in (1,3) - AND u.upload_type_id = 1 - AND u.upload_status_id = 1 - AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_terms.properties b/src/main/resources/sql/challenge-feeder/get_terms.properties deleted file mode 100644 index 4688162..0000000 --- a/src/main/resources/sql/challenge-feeder/get_terms.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=pr.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_terms.sql b/src/main/resources/sql/challenge-feeder/get_terms.sql deleted file mode 100644 index 2aa3b3a..0000000 --- a/src/main/resources/sql/challenge-feeder/get_terms.sql +++ /dev/null @@ -1,16 +0,0 @@ -SELECT - pr.project_id as challengeId, - tou.terms_of_use_id as termsOfUseId, - tou.title, - tou.url, - touat.name as agreeabilityType, - dtx.docusign_template_id as templateId, - rrl.name role - FROM project_role_terms_of_use_xref pr - INNER JOIN terms_of_use tou ON pr.terms_of_use_id = tou.terms_of_use_id - INNER JOIN common_oltp\:terms_of_use_agreeability_type_lu touat ON touat.terms_of_use_agreeability_type_id = tou.terms_of_use_agreeability_type_id - LEFT JOIN common_oltp\:terms_of_use_docusign_template_xref dtx ON dtx.terms_of_use_id = pr.terms_of_use_id - LEFT JOIN resource_role_lu rrl on rrl.resource_role_id = pr.resource_role_id - WHERE pr.resource_role_id IN (select resource_role_id from resource_role_lu) AND {filter} - - \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_user_ids.properties b/src/main/resources/sql/challenge-feeder/get_user_ids.properties deleted file mode 100644 index 88f7284..0000000 --- a/src/main/resources/sql/challenge-feeder/get_user_ids.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=p.project_id in () -filter.challengeIds.type=List \ No 
newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_user_ids.sql b/src/main/resources/sql/challenge-feeder/get_user_ids.sql deleted file mode 100644 index e691dc4..0000000 --- a/src/main/resources/sql/challenge-feeder/get_user_ids.sql +++ /dev/null @@ -1,22 +0,0 @@ -SELECT distinct - p.project_id AS challengeId, - r.user_id AS userId, - r.user_id || - (SELECT CASE - WHEN q.count > 0 THEN 'T' - ELSE 'F' - END AS hasSubmittedForReview - FROM - (SELECT count(*) AS count - FROM upload u, - submission s, - RESOURCE rs - WHERE u.upload_id = s.upload_id - AND rs.user_id = r.user_id - AND rs.resource_role_id = 1 - AND rs.project_id = u.project_id - AND u.project_id = p.project_id - AND rs.resource_id = u.resource_id) AS q) AS hasUserSubmittedForReview - FROM RESOURCE r - INNER JOIN project p ON p.project_id = r.project_id - WHERE p.project_category_id NOT IN (27,37) AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_winners.properties b/src/main/resources/sql/challenge-feeder/get_winners.properties deleted file mode 100644 index 8234dd0..0000000 --- a/src/main/resources/sql/challenge-feeder/get_winners.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.challengeIds.template=u.project_id in () -filter.challengeIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/challenge-feeder/get_winners.sql b/src/main/resources/sql/challenge-feeder/get_winners.sql deleted file mode 100644 index 2bfdb88..0000000 --- a/src/main/resources/sql/challenge-feeder/get_winners.sql +++ /dev/null @@ -1,19 +0,0 @@ -SELECT - u.project_id as challengeId, - ri.value as userId, - user.handle as handle, - s.placement as placement, - CASE WHEN s.submission_type_id = 1 THEN CAST('final' AS VARCHAR(10)) ELSE CAST('checkpoint' AS VARCHAR(10)) END as submissionType, - (SELECT p.path || i.file_name - FROM informixoltp\:coder_image_xref mi - INNER JOIN informixoltp\:image i ON mi.image_id = i.image_id - INNER JOIN informixoltp\:path p ON p.path_id = i.path_id - WHERE mi.coder_id = ri.value AND mi.display_flag = 1 AND i.image_type_id = 1) as photoURL - FROM upload u - INNER JOIN submission s ON s.upload_id = u.upload_id - INNER JOIN prize p ON p.prize_id = s.prize_id - INNER JOIN resource_info ri ON ri.resource_id = u.resource_id - INNER JOIN user ON user.user_id = ri.value::DECIMAL(10,0) - WHERE resource_info_type_id = 1 - AND ((s.submission_type_id = 1 AND p.prize_type_id = 15) OR (s.submission_type_id = 3 AND p.prize_type_id = 14)) - AND {filter} \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get-terms.properties b/src/main/resources/sql/mm-feeder-into-challenges/get-terms.properties deleted file mode 100644 index f429aba..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get-terms.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.roundIds.template=round_id in () -filter.roundIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get-terms.sql b/src/main/resources/sql/mm-feeder-into-challenges/get-terms.sql deleted file mode 100644 index c819e6f..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get-terms.sql +++ /dev/null @@ -1,4 +0,0 @@ -select -round_id as challengeId -from informixoltp\:round_terms -where {filter} \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.properties b/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.properties 
deleted file mode 100644 index 786e905..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.roundIds.template=r.round_id in () -filter.roundIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.sql b/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.sql deleted file mode 100644 index 4c45140..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_marathon_matches.sql +++ /dev/null @@ -1,36 +0,0 @@ -select -r.round_id as id, -r.contest_id as contestId, -comp.component_id as componentId, -c.name || ' - ' || r.name as name, -'DATA SCIENCE' as track, -'Marathon Match' as subTrack, -'mmatches' as type, -r.tc_direct_project_id as directProjectId, -tcdirect.name as directProjectName, -/* comp.component_text as marathonMatchDetailRequirements, */ -'system' as createdBy, -r.forum_id as forumId, -CASE - WHEN (r.status = 'P') THEN 'Completed' - WHEN (r.status = 'A') THEN 'Active' - WHEN (r.status = 'F') THEN 'Draft' - WHEN (r.status = 'X') THEN 'Deleted' - WHEN (r.status = 'T') THEN 'Active' - ELSE 'Open' - END as status, -rs_reg.start_time as registrationStartDate, -rs_reg.end_time as registrationEndDate, -rs_sub.end_time as submissionEndDate, -(select count(coder_id) from informixoltp\:round_registration where round_id = r.round_id) as numRegistrants, -(select sum(submission_number) from informixoltp\:long_component_state where round_id = r.round_id and status_id in(130, 131, 140, 150, 160)) as numSubmissions, -(select sum(amount) from informixoltp\:round_prize rp where rp.round_id = r.round_id) as totalPrize, -(select pi.project_id from project_info pi where pi.project_info_type_id = 56 and pi.value::decimal = r.round_id) is null as isLegacy -from informixoltp\:round r -left join informixoltp\:contest c on r.contest_id = c.contest_id -left join corporate_oltp\:tc_direct_project AS tcdirect ON r.tc_direct_project_id = tcdirect.project_id -left join informixoltp\:round_component as rc on rc.round_id = r.round_id -left join informixoltp\:component comp on comp.component_id = rc.component_id -left join informixoltp\:round_segment rs_reg on rs_reg.round_id = r.round_id and rs_reg.segment_id = 1 -left join informixoltp\:round_segment rs_sub on rs_sub.round_id = r.round_id and rs_sub.segment_id = 2 -where r.round_type_id = 13 and {filter} \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_resources.properties b/src/main/resources/sql/mm-feeder-into-challenges/get_resources.properties deleted file mode 100644 index 862235c..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_resources.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.roundIds.template=rr.round_id in () -filter.roundIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_resources.sql b/src/main/resources/sql/mm-feeder-into-challenges/get_resources.sql deleted file mode 100644 index b95fd5f..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_resources.sql +++ /dev/null @@ -1,14 +0,0 @@ -select -rr.round_id as challengeId, -rr.coder_id as userId, -rr.timestamp as registrationDate, -user.handle as handle, -'Submitter' as role, -ar.rating, -(select count(*) from informixoltp\:long_component_state lcs, informixoltp\:long_submission ls - where lcs.long_component_state_id = ls.long_component_state_id - and lcs.round_id = rr.round_id and 
lcs.coder_id = rr.coder_id and ls.example = 0) as submissionCount -from informixoltp\:round_registration rr -left join user on user.user_id = coder_id -left join informixoltp\:algo_rating ar on ar.coder_id = rr.coder_id and ar.algo_rating_type_id=3 -where {filter} \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.properties b/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.properties deleted file mode 100644 index 7af166e..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.properties +++ /dev/null @@ -1,2 +0,0 @@ -filter.roundIds.template=lcs.round_id in () -filter.roundIds.type=List \ No newline at end of file diff --git a/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.sql b/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.sql deleted file mode 100644 index 91b4857..0000000 --- a/src/main/resources/sql/mm-feeder-into-challenges/get_submissions.sql +++ /dev/null @@ -1,15 +0,0 @@ -select -lcs.round_id as challengeId, -'Final' as submissionType, -user.handle as submitter, -lcs.coder_id as submitterId, -ls.submission_points as finalScore, -ls.submission_points as initialScore, -extend(dbinfo("UTC_TO_DATETIME",ls.submit_time/1000), year to fraction) as submittedAt, --1 as placement, --1 as screeningScore, -'Active' as status -from informixoltp\:long_submission ls -left join informixoltp\:long_component_state lcs on lcs.long_component_state_id = ls.long_component_state_id -left join user on user.user_id = lcs.coder_id -where {filter} \ No newline at end of file
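
Note on the resource hunks above (ChallengeFeederResource / MmFeederResource): after this change both push endpoints keep their routes and signatures but no longer call a manager or validate the request body; they simply return an empty metadata response. A minimal sketch of the resulting marathon-match method, reconstructed from the MmFeederResource hunks (class-level annotations and the route are assumed unchanged by this diff and are omitted here, and the generic parameter on PostPutRequest is assumed to still be MmFeederParam):

    @PUT
    @Timed
    public ApiResponse pushMarathonMatchDataIntoChallenge(@Auth AuthUser user,
            @Valid PostPutRequest<MmFeederParam> request) {
        try {
            // The MmFeederManager call and the request-body null check were removed;
            // the endpoint is now a dummy that always reports success.
            return MetadataApiResponseFactory.createResponse(null);
        } catch (Exception e) {
            return ErrorHandler.handle(e, logger);
        }
    }

ChallengeFeederResource.pushChallengeFeeders is reduced the same way, and ChallengeFeederFactory / MmFeederResourceFactory now construct these resources without any DAO or manager wiring.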
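
Note on the elasticsearch-feeder-service.yaml hunk above: the flat redissonConfiguration block is replaced by a nested jobsConfiguration section holding the shared Redisson settings (lockWatchdogTimeout, singleServerAddress, clusterEnabled, nodeAddresses) plus one sub-block per job with its own indexName and batchUpdateSize (and, for the match jobs, marathonMatchesDaysToSubtract / singleRoundMatchesDaysToSubtract and marathonMatchesForumUrl). Every value keeps the ${ENV_VAR:-default} form, so each job can still be tuned per environment without editing the yaml. The Dropwizard binding classes for this section are not part of this diff; the sketch below only illustrates how such a nested section is typically bound with Jackson, and every class and field name in it is hypothetical:

    import com.fasterxml.jackson.annotation.JsonProperty;
    import java.util.List;

    // Hypothetical binding for the new "jobsConfiguration" yaml section (names are illustrative only).
    public class JobsConfiguration {

        @JsonProperty
        private RedissonConfiguration redissonConfiguration;

        @JsonProperty
        private JobConfiguration marathonMatchesJob;

        @JsonProperty
        private JobConfiguration singleRoundMatchesJob;

        // ...one field per remaining job sub-block (loadChangedChallengesListingJob, etc.)

        public static class RedissonConfiguration {
            @JsonProperty
            private long lockWatchdogTimeout;
            @JsonProperty
            private String singleServerAddress;
            @JsonProperty
            private boolean clusterEnabled;
            @JsonProperty
            private List<String> nodeAddresses;
        }

        public static class JobConfiguration {
            @JsonProperty
            private String indexName;
            @JsonProperty
            private int batchUpdateSize;
            @JsonProperty
            private Integer marathonMatchesDaysToSubtract;    // only present for the MM-related jobs
            @JsonProperty
            private Integer singleRoundMatchesDaysToSubtract; // only present for the SRM job
            @JsonProperty
            private String marathonMatchesForumUrl;
        }
    }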