public static SparkTask createSparkTask(SparkWork work, HiveConf conf) {
  return (SparkTask) TaskFactory.get(work);
}
@SuppressWarnings("unchecked") @VisibleForTesting static <T extends Serializable> Task<T> get(Class<T> workClass) { for (TaskTuple<? extends Serializable> t : taskvec) { if (t.workClass == workClass) { try { Task<T> ret = (Task<T>) t.taskClass.newInstance(); ret.setId("Stage-" + Integer.toString(getAndIncrementId())); return ret; } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("No task for work class " + workClass.getName()); }
private void createInsertDesc(Table table, boolean overwrite) {
  Task<? extends Serializable>[] tasks = this.rootTasks.toArray(new Task[this.rootTasks.size()]);
  PreInsertTableDesc preInsertTableDesc = new PreInsertTableDesc(table, overwrite);
  InsertTableDesc insertTableDesc = new InsertTableDesc(table, overwrite);
  // The pre-insert DDL task is added as a new root; the insert DDL task is then
  // created as a child of all existing root tasks.
  this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertTableDesc), conf));
  TaskFactory.getAndMakeChild(new DDLWork(getInputs(), getOutputs(), insertTableDesc), conf, tasks);
}
TaskFactory.resetId();
public static <T extends Serializable> Task<T> get(T work) {
  return get(work, null);
}
@SuppressWarnings("unchecked") public static <T extends Serializable> Task<T> get(Class<T> workClass, HiveConf conf) { for (TaskTuple<? extends Serializable> t : taskvec) { if (t.workClass == workClass) { try { Task<T> ret = (Task<T>) t.taskClass.newInstance(); ret.setId("Stage-" + Integer.toString(getAndIncrementId())); return ret; } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("No task for work class " + workClass.getName()); }
private static Task<MapredWork> getMapredWork() {
  return TaskFactory.get(MapredWork.class);
}
@SuppressWarnings("unchecked") public static <T extends Serializable> Task<T> get(Class<T> workClass, HiveConf conf) { for (TaskTuple<? extends Serializable> t : taskvec) { if (t.workClass == workClass) { try { Task<T> ret = (Task<T>) t.taskClass.newInstance(); ret.setId("Stage-" + Integer.toString(getAndIncrementId())); return ret; } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("No task for work class " + workClass.getName()); }
private static Task<? extends Serializable> alterDbTask(String dbName, Map<String, String> props, HiveConf hiveConf) {
  AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, props, null);
  DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
  return TaskFactory.get(work, hiveConf);
}
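// Illustrative only (assumed caller, not in the source): the task built by
// alterDbTask would typically be added to a task list for execution. The
// property key and value below are made-up example data.
Map<String, String> props = new HashMap<>();
props.put("some.db.property", "value");
Task<? extends Serializable> alterTask = alterDbTask("default", props, new HiveConf());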
@SuppressWarnings("unchecked") public static <T extends Serializable> Task<T> get(Class<T> workClass, HiveConf conf) { for (taskTuple<? extends Serializable> t : taskvec) { if (t.workClass == workClass) { try { Task<T> ret = (Task<T>) t.taskClass.newInstance(); ret.setId("Stage-" + Integer.toString(getAndIncrementId())); return ret; } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("No task for work class " + workClass.getName()); }
public Task<? extends Serializable> getCreateTableTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, HiveConf conf) {
  switch (getDescType()) {
  case TABLE:
    return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf);
  case VIEW:
    return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf);
  }
  // No task is produced for descriptor types other than TABLE and VIEW.
  return null;
}
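// Hedged usage sketch (call site assumed, not from the source): "tblDesc" stands
// in for whatever object exposes getCreateTableTask above; callers need to guard
// against the null returned for unsupported descriptor types.
Task<? extends Serializable> createTask = tblDesc.getCreateTableTask(getInputs(), getOutputs(), conf);
if (createTask != null) {
  rootTasks.add(createTask);
}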