();
- for (Object obj : selection.toList()) {
- if (clazz.isAssignableFrom(obj.getClass())) {
- list.add((T) obj);
- }
- }
- return list;
- }
-
- private static int computeUploadWork(File file) {
- if (file.isDirectory()) {
- int contentWork = 1;
- for (File child : file.listFiles())
- contentWork += computeUploadWork(child);
- return contentWork;
-
- } else if (file.isFile()) {
- return 1 + (int) (file.length() / 1024);
-
- } else {
- return 0;
- }
- }
-
-}
-
-/**
- * Adapter to allow the viewing of a DfsFile in the Editor window
- */
-class DFSFileEditorInput extends PlatformObject implements
- IStorageEditorInput {
-
- private DFSFile file;
-
- /**
- * Constructor
- *
- * @param file
- */
- DFSFileEditorInput(DFSFile file) {
- this.file = file;
- }
-
- /* @inheritDoc */
- public String getToolTipText() {
- return file.toDetailedString();
- }
-
- /* @inheritDoc */
- public IPersistableElement getPersistable() {
- return null;
- }
-
- /* @inheritDoc */
- public String getName() {
- return file.toString();
- }
-
- /* @inheritDoc */
- public ImageDescriptor getImageDescriptor() {
- return ImageLibrary.get("dfs.file.editor");
- }
-
- /* @inheritDoc */
- public boolean exists() {
- return true;
- }
-
- /* @inheritDoc */
- public IStorage getStorage() throws CoreException {
- return file.getIStorage();
- }
-};
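
The computeUploadWork() helper above estimates progress-monitor units as one
unit per directory plus one unit per full KiB of regular-file content. A
minimal standalone sketch of the same heuristic (the class name is
hypothetical, and a guard is added for the null that File.listFiles() can
return on I/O errors):

import java.io.File;

class UploadWorkEstimator {
  static int computeUploadWork(File file) {
    if (file.isDirectory()) {
      int work = 1;                       // one unit for the directory itself
      File[] children = file.listFiles(); // null if the directory is unreadable
      if (children != null)
        for (File child : children)
          work += computeUploadWork(child);
      return work;
    } else if (file.isFile()) {
      return 1 + (int) (file.length() / 1024); // base unit + one per full KiB
    }
    return 0; // neither file nor directory (e.g. a dangling symlink)
  }
}
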
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java
deleted file mode 100644
index cdfbe93474d..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.actions;
-
-import org.apache.hadoop.eclipse.ImageLibrary;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
-import org.apache.hadoop.eclipse.view.servers.ServerView;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.wizard.Wizard;
-import org.eclipse.jface.wizard.WizardDialog;
-
-/**
- * Editing server properties action
- */
-public class EditLocationAction extends Action {
-
- private ServerView serverView;
-
- public EditLocationAction(ServerView serverView) {
- this.serverView = serverView;
-
- setText("Edit Hadoop location...");
- setImageDescriptor(ImageLibrary.get("server.view.action.location.edit"));
- }
-
- @Override
- public void run() {
-
- final HadoopServer server = serverView.getSelectedServer();
- if (server == null)
- return;
-
- WizardDialog dialog = new WizardDialog(null, new Wizard() {
- private HadoopLocationWizard page = new HadoopLocationWizard(server);
-
- @Override
- public void addPages() {
- super.addPages();
- setWindowTitle("Edit Hadoop location...");
- addPage(page);
- }
-
- @Override
- public boolean performFinish() {
- page.performFinish();
- return true;
- }
- });
-
- dialog.create();
- dialog.setBlockOnOpen(true);
- dialog.open();
-
- super.run();
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java
deleted file mode 100644
index 5db0bc56daa..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.actions;
-
-import org.apache.hadoop.eclipse.ImageLibrary;
-import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.wizard.Wizard;
-import org.eclipse.jface.wizard.WizardDialog;
-
-
-/**
- * Action corresponding to creating a new MapReduce Server.
- */
-
-public class NewLocationAction extends Action {
- public NewLocationAction() {
- setText("New Hadoop location...");
- setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
- }
-
- @Override
- public void run() {
- WizardDialog dialog = new WizardDialog(null, new Wizard() {
- private HadoopLocationWizard page = new HadoopLocationWizard();
-
- @Override
- public void addPages() {
- super.addPages();
- setWindowTitle("New Hadoop location...");
- addPage(page);
- }
-
- @Override
- public boolean performFinish() {
- page.performFinish();
- return true;
- }
-
- });
-
- dialog.create();
- dialog.setBlockOnOpen(true);
- dialog.open();
-
- super.run();
- }
-}
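
EditLocationAction and NewLocationAction share the same shape: wrap a single
HadoopLocationWizard page in an anonymous Wizard, then run it modally in a
WizardDialog. A hedged sketch of that reusable pattern (the helper class and
the generic page type are illustrative, not part of the deleted plugin):

import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;

class SinglePageWizardRunner {
  static void run(final String title, final IWizardPage page) {
    WizardDialog dialog = new WizardDialog(null, new Wizard() {
      @Override
      public void addPages() {
        super.addPages();
        setWindowTitle(title); // both actions reuse their action text here
        addPage(page);
      }

      @Override
      public boolean performFinish() {
        return true; // the real actions delegate to page.performFinish()
      }
    });
    dialog.create();
    dialog.setBlockOnOpen(true); // block until the user closes the dialog
    dialog.open();
  }
}
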
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
deleted file mode 100644
index cc1f9ecb6cf..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.actions;
-
-import java.util.logging.Logger;
-
-import org.apache.hadoop.eclipse.NewDriverWizard;
-import org.apache.hadoop.eclipse.NewMapperWizard;
-import org.apache.hadoop.eclipse.NewReducerWizard;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.viewers.StructuredSelection;
-import org.eclipse.jface.window.Window;
-import org.eclipse.jface.wizard.WizardDialog;
-import org.eclipse.ui.INewWizard;
-import org.eclipse.ui.IWorkbench;
-import org.eclipse.ui.PlatformUI;
-import org.eclipse.ui.cheatsheets.ICheatSheetAction;
-import org.eclipse.ui.cheatsheets.ICheatSheetManager;
-
-
-/**
- * Action to open a new MapReduce Class.
- */
-
-public class OpenNewMRClassWizardAction extends Action implements
- ICheatSheetAction {
-
- static Logger log = Logger.getLogger(OpenNewMRClassWizardAction.class
- .getName());
-
- public void run(String[] params, ICheatSheetManager manager) {
-
- if ((params != null) && (params.length > 0)) {
- IWorkbench workbench = PlatformUI.getWorkbench();
- INewWizard wizard = getWizard(params[0]);
- wizard.init(workbench, new StructuredSelection());
- WizardDialog dialog = new WizardDialog(PlatformUI.getWorkbench()
- .getActiveWorkbenchWindow().getShell(), wizard);
- dialog.create();
- dialog.open();
-
-      // did the wizard succeed?
- notifyResult(dialog.getReturnCode() == Window.OK);
- }
- }
-
- private INewWizard getWizard(String typeName) {
- if (typeName.equals("Mapper")) {
- return new NewMapperWizard();
- } else if (typeName.equals("Reducer")) {
- return new NewReducerWizard();
- } else if (typeName.equals("Driver")) {
- return new NewDriverWizard();
- } else {
- log.severe("Invalid Wizard requested");
- return null;
- }
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
deleted file mode 100644
index c7fde10c09a..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.actions;
-
-import org.apache.hadoop.eclipse.NewMapReduceProjectWizard;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.viewers.StructuredSelection;
-import org.eclipse.jface.window.Window;
-import org.eclipse.jface.wizard.WizardDialog;
-import org.eclipse.swt.widgets.Shell;
-import org.eclipse.ui.IWorkbench;
-import org.eclipse.ui.PlatformUI;
-
-/**
- * Action to open a new Map/Reduce project.
- */
-
-public class OpenNewMRProjectAction extends Action {
-
- @Override
- public void run() {
- IWorkbench workbench = PlatformUI.getWorkbench();
- Shell shell = workbench.getActiveWorkbenchWindow().getShell();
- NewMapReduceProjectWizard wizard = new NewMapReduceProjectWizard();
- wizard.init(workbench, new StructuredSelection());
- WizardDialog dialog = new WizardDialog(shell, wizard);
- dialog.create();
- dialog.open();
- // did the wizard succeed?
- notifyResult(dialog.getReturnCode() == Window.OK);
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java
deleted file mode 100644
index 65436ac106a..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import org.apache.hadoop.eclipse.ImageLibrary;
-import org.apache.hadoop.eclipse.actions.DFSActionImpl;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.action.IMenuManager;
-import org.eclipse.jface.resource.ImageDescriptor;
-import org.eclipse.jface.viewers.ISelection;
-import org.eclipse.jface.viewers.IStructuredSelection;
-import org.eclipse.ui.IActionBars;
-import org.eclipse.ui.PlatformUI;
-import org.eclipse.ui.actions.ActionFactory;
-import org.eclipse.ui.navigator.CommonActionProvider;
-import org.eclipse.ui.navigator.ICommonActionConstants;
-import org.eclipse.ui.navigator.ICommonActionExtensionSite;
-import org.eclipse.ui.navigator.ICommonMenuConstants;
-
-/**
- * Allows the user to delete and refresh items in the DFS tree
- */
-
-public class ActionProvider extends CommonActionProvider {
-
- private static ICommonActionExtensionSite site;
-
- public ActionProvider() {
- }
-
- /* @inheritDoc */
- @Override
- public void init(ICommonActionExtensionSite site) {
- if (ActionProvider.site != null) {
- System.err.printf("%s: Multiple init()\n", this.getClass()
- .getCanonicalName());
- return;
- }
- super.init(site);
- ActionProvider.site = site;
- }
-
- /* @inheritDoc */
- @Override
- public void fillActionBars(IActionBars actionBars) {
- actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
- new DFSAction(DFSActions.DELETE));
- actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
- new DFSAction(DFSActions.REFRESH));
-
- if (site == null)
- return;
-
- if ((site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
- && (((IStructuredSelection) site.getStructuredViewer()
- .getSelection()).size() == 1)
- && (((IStructuredSelection) site.getStructuredViewer()
- .getSelection()).getFirstElement() instanceof DFSFile)) {
-
- actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
- new DFSAction(DFSActions.OPEN));
- }
-
- actionBars.updateActionBars();
- }
-
- /* @inheritDoc */
- @Override
- public void fillContextMenu(IMenuManager menu) {
- /*
- * Actions on multiple selections
- */
- menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DFSAction(
- DFSActions.DELETE));
-
- menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
- DFSActions.REFRESH));
-
- menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
- DFSActions.DOWNLOAD));
-
- if (site == null)
- return;
-
- ISelection isel = site.getStructuredViewer().getSelection();
- if (!(isel instanceof IStructuredSelection))
- return;
-
- /*
- * Actions on single selections only
- */
-
- IStructuredSelection issel = (IStructuredSelection) isel;
- if (issel.size() != 1)
- return;
- Object element = issel.getFirstElement();
-
- if (element instanceof DFSFile) {
- menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
- DFSActions.OPEN));
-
- } else if (element instanceof DFSFolder) {
- menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
- DFSActions.MKDIR));
- menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
- DFSActions.UPLOAD_FILES));
- menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
- DFSActions.UPLOAD_DIR));
-
- } else if (element instanceof DFSLocation) {
- menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
- DFSActions.RECONNECT));
-
- } else if (element instanceof DFSLocationsRoot) {
- menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
- DFSActions.DISCONNECT));
- }
-
- }
-
- /**
- * Representation of an action on a DFS entry in the browser
- */
- public static class DFSAction extends Action {
-
- private final String id;
-
- private final String title;
-
- private DFSActions action;
-
- public DFSAction(String id, String title) {
- this.id = id;
- this.title = title;
- }
-
- public DFSAction(DFSActions action) {
- this.id = action.id;
- this.title = action.title;
- }
-
- /* @inheritDoc */
- @Override
- public String getText() {
- return this.title;
- }
-
- /* @inheritDoc */
- @Override
- public ImageDescriptor getImageDescriptor() {
- return ImageLibrary.get(getActionDefinitionId());
- }
-
- /* @inheritDoc */
- @Override
- public String getActionDefinitionId() {
- return id;
- }
-
- /* @inheritDoc */
- @Override
- public void run() {
- DFSActionImpl action = new DFSActionImpl();
- action.setActivePart(this, PlatformUI.getWorkbench()
- .getActiveWorkbenchWindow().getActivePage().getActivePart());
- action.selectionChanged(this, site.getStructuredViewer()
- .getSelection());
- action.run(this);
- }
-
- /* @inheritDoc */
- @Override
- public boolean isEnabled() {
- return true;
- }
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java
deleted file mode 100644
index 038497ae893..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-public enum DFSActions {
-
- DELETE("Delete"), REFRESH("Refresh"), DOWNLOAD("Download from DFS..."), OPEN(
- "View"), MKDIR("Create new directory..."), UPLOAD_FILES(
- "Upload files to DFS..."), UPLOAD_DIR("Upload directory to DFS..."), RECONNECT(
- "Reconnect"), DISCONNECT("Disconnect");
-
- final String title;
-
- final String id;
-
- private static final String PREFIX = "dfs.browser.action.";
-
- public static DFSActions getById(String def) {
- if (!def.startsWith(PREFIX))
- return null;
- return valueOf(def.substring(PREFIX.length()).toUpperCase());
- }
-
- DFSActions(String title) {
- this.title = title;
- this.id = PREFIX + this.name().toLowerCase();
- }
-}
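
Each constant's command id is derived as "dfs.browser.action." plus the
lower-cased constant name, and getById() inverts that mapping. A hedged usage
sketch (assumes same-package access to the package-private fields):

public class DFSActionsDemo {
  public static void main(String[] args) {
    DFSActions a = DFSActions.getById("dfs.browser.action.upload_files");
    System.out.println(a == DFSActions.UPLOAD_FILES);        // true
    System.out.println(a.id);                                // dfs.browser.action.upload_files
    System.out.println(DFSActions.getById("other.command")); // null: foreign prefix
  }
}

Note that an id with the right prefix but an unknown suffix falls through to
Enum.valueOf() and raises IllegalArgumentException rather than returning null.
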
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java
deleted file mode 100644
index bea94d53697..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-/**
- * Interface to define content entities in the DFS browser
- */
-public interface DFSContent {
-
- boolean hasChildren();
-
- DFSContent[] getChildren();
-
- void refresh();
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java
deleted file mode 100644
index fca7d46916b..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.eclipse.ImageLibrary;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.servers.ServerRegistry;
-import org.eclipse.jface.viewers.ILabelProvider;
-import org.eclipse.jface.viewers.ILabelProviderListener;
-import org.eclipse.jface.viewers.ITreeContentProvider;
-import org.eclipse.jface.viewers.StructuredViewer;
-import org.eclipse.jface.viewers.Viewer;
-import org.eclipse.swt.graphics.Image;
-import org.eclipse.swt.widgets.Display;
-
-/**
- * Handles viewing of DFS locations
- *
- *
- * The content handled by this provider is a tree:
- *
- *
- *   DFSLocationsRoot
- *    \_ HadoopServer
- *    |   \_ DfsFolder
- *    |   |   \_ DfsFile
- *    |   \_ DfsFolder
- *    |   ...
- *    \_ HadoopServer...
- *
- * The code should not block here: blocking operations need to be done
- * asynchronously so as not to freeze the UI!
- */
-public class DFSContentProvider implements ITreeContentProvider,
- ILabelProvider {
-
- /**
- * The viewer that displays this Tree content
- */
- private Viewer viewer;
-
- private StructuredViewer sviewer;
-
-  private Map<HadoopServer, DFSFolder> rootFolders =
-      new HashMap<HadoopServer, DFSFolder>();
-
- /**
- * Constructor: load resources (icons).
- */
- public DFSContentProvider() {
- }
-
- private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this);
-
- /*
- * ITreeContentProvider implementation
- */
-
- /* @inheritDoc */
- public Object[] getChildren(Object parent) {
-
- if (!(parent instanceof DFSContent))
- return null;
- DFSContent content = (DFSContent) parent;
- return content.getChildren();
- }
-
- public Object[] test(Object parentElement) {
- if (parentElement instanceof DFSLocationsRoot) {
- return ServerRegistry.getInstance().getServers().toArray();
-
- } else if (parentElement instanceof HadoopServer) {
- final HadoopServer location = (HadoopServer) parentElement;
- Object root = rootFolders.get(location);
- if (root != null)
- return new Object[] { root };
-
- return new Object[] { "Connecting to DFS..." };
-
- } else if (parentElement instanceof DFSFolder) {
- DFSFolder folder = (DFSFolder) parentElement;
- return folder.getChildren();
- }
-
- return new Object[] { "" };
- }
-
- /* @inheritDoc */
- public Object getParent(Object element) {
-
- if (element instanceof DFSPath) {
- return ((DFSPath) element).getParent();
-
- } else if (element instanceof HadoopServer) {
- return locationsRoot;
- }
-
- return null;
- }
-
- /* @inheritDoc */
- public boolean hasChildren(Object element) {
- if (element instanceof DFSContent) {
- DFSContent content = (DFSContent) element;
- return content.hasChildren();
- }
- return false;
- }
-
- /*
- * IStructureContentProvider implementation
- */
-
- /* @inheritDoc */
- public Object[] getElements(final Object inputElement) {
- return new Object[] { locationsRoot };
- // return ServerRegistry.getInstance().getServers().toArray();
- }
-
- /*
- * ILabelProvider implementation
- */
-
- /* @inheritDoc */
- public Image getImage(Object element) {
- if (element instanceof DFSLocationsRoot)
- return ImageLibrary.getImage("dfs.browser.root.entry");
-
- else if (element instanceof DFSLocation)
- return ImageLibrary.getImage("dfs.browser.location.entry");
-
- else if (element instanceof DFSFolder)
- return ImageLibrary.getImage("dfs.browser.folder.entry");
-
- else if (element instanceof DFSFile)
- return ImageLibrary.getImage("dfs.browser.file.entry");
-
- return null;
- }
-
- /* @inheritDoc */
- public String getText(Object element) {
- if (element instanceof DFSFile)
- return ((DFSFile) element).toDetailedString();
-
- return element.toString();
- }
-
- /*
- * IBaseLabelProvider implementation
- */
-
- /* @inheritDoc */
- public void addListener(ILabelProviderListener listener) {
- }
-
- /* @inheritDoc */
- public void removeListener(ILabelProviderListener listener) {
- }
-
- /* @inheritDoc */
- public boolean isLabelProperty(Object element, String property) {
- return false;
- }
-
- /*
- * IContentProvider implementation
- */
-
- /* @inheritDoc */
- public void dispose() {
- }
-
- /* @inheritDoc */
- public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
- this.viewer = viewer;
- if ((viewer != null) && (viewer instanceof StructuredViewer))
- this.sviewer = (StructuredViewer) viewer;
- else
- this.sviewer = null;
- }
-
- /*
- * Miscellaneous
- */
-
- /**
- * Ask the viewer for this content to refresh
- */
- void refresh() {
- // no display, nothing to update
- if (this.viewer == null)
- return;
-
- Display.getDefault().asyncExec(new Runnable() {
- public void run() {
- DFSContentProvider.this.viewer.refresh();
- }
- });
- }
-
- /**
- * Ask the viewer to refresh a single element
- *
- * @param content what to refresh
- */
- void refresh(final DFSContent content) {
- if (this.sviewer != null) {
- Display.getDefault().asyncExec(new Runnable() {
- public void run() {
- DFSContentProvider.this.sviewer.refresh(content);
- }
- });
-
- } else {
- refresh();
- }
- }
-
- Viewer getViewer() {
- return this.viewer;
- }
-
-}
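
Since DFSContentProvider implements both ITreeContentProvider and
ILabelProvider, a tree view attaches the same instance twice. A hedged wiring
sketch (the surrounding class and the parent Composite are assumptions):

import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.swt.widgets.Composite;

class DFSViewerSetup {
  static TreeViewer createDFSViewer(Composite parent) {
    TreeViewer viewer = new TreeViewer(parent);
    DFSContentProvider provider = new DFSContentProvider();
    viewer.setContentProvider(provider); // ITreeContentProvider side
    viewer.setLabelProvider(provider);   // ILabelProvider side
    viewer.setInput(new Object());       // any non-null input works:
                                         // getElements() always returns the
                                         // single DFSLocationsRoot element
    return viewer;
  }
}
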
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java
deleted file mode 100644
index af8e6c183b1..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java
+++ /dev/null
@@ -1,350 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.reflect.InvocationTargetException;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.ErrorMessageDialog;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.eclipse.core.resources.IStorage;
-import org.eclipse.core.runtime.CoreException;
-import org.eclipse.core.runtime.IPath;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.PlatformObject;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.jface.dialogs.MessageDialog;
-import org.eclipse.jface.operation.IRunnableWithProgress;
-import org.eclipse.ui.PlatformUI;
-
-/**
- * File handling methods for the DFS
- */
-public class DFSFile extends DFSPath implements DFSContent {
-
- protected long length;
-
- protected short replication;
-
- /**
- * Constructor to upload a file on the distributed file system
- *
- * @param parent
- * @param path
- * @param file
- * @param monitor
- */
- public DFSFile(DFSPath parent, Path path, File file,
- IProgressMonitor monitor) {
-
- super(parent, path);
- this.upload(monitor, file);
- }
-
- public DFSFile(DFSPath parent, Path path) {
- super(parent, path);
-
- try {
- FileStatus fs = getDFS().getFileStatus(path);
- this.length = fs.getLen();
- this.replication = fs.getReplication();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Download and view contents of a file
- *
- * @return a InputStream for the file
- */
- public InputStream open() throws IOException {
-
- return getDFS().open(this.path);
- }
-
- /**
- * Download this file to the local file system. This creates a download
- * status monitor.
- *
- * @param file
- * @throws InvocationTargetException
- * @throws InterruptedException
- *
- * @deprecated
- */
- public void downloadToLocalFile(final File file)
- throws InvocationTargetException, InterruptedException {
-
- PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
- new IRunnableWithProgress() {
- public void run(IProgressMonitor monitor)
- throws InvocationTargetException {
-
- DFSFile.this.downloadToLocalFile(monitor, file);
- }
- });
- }
-
- /* @inheritDoc */
- @Override
- public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
-
- File dfsPath = new File(this.getPath().toString());
- File destination = new File(dir, dfsPath.getName());
-
- if (destination.exists()) {
- boolean answer =
- MessageDialog.openQuestion(null, "Overwrite existing local file?",
- "The file you are attempting to download from the DFS "
- + this.getPath()
- + ", already exists in your local directory as "
- + destination + ".\n" + "Overwrite the existing file?");
- if (!answer)
- return;
- }
-
- try {
- this.downloadToLocalFile(monitor, destination);
-
- } catch (Exception e) {
- e.printStackTrace();
- MessageDialog.openWarning(null, "Download to local file system",
- "Downloading of file \"" + this.path + "\" to local directory \""
- + dir + "\" has failed.\n" + e);
- }
- }
-
- /**
- * Provides a detailed string for this file
- *
- * @return the string formatted as
- * <filename> (<size>, r<replication>)
- */
- public String toDetailedString() {
- final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
- int unit = 0;
- double l = this.length;
-    while ((l >= 1024.0) && (unit < units.length - 1)) {
- unit += 1;
- l /= 1024.0;
- }
-
- return String.format("%s (%.1f %s, r%d)", super.toString(), l,
- units[unit], this.replication);
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return this.path.toString();
- }
-
- /*
- *
- */
-
- /**
- * Download the DfsFile to a local file. Use the given monitor to report
- * status of operation.
- *
- * @param monitor the status monitor
- * @param file the local file where to put the downloaded file
- * @throws InvocationTargetException
- */
- public void downloadToLocalFile(IProgressMonitor monitor, File file)
- throws InvocationTargetException {
-
- final int taskSize = 1024;
-
- monitor.setTaskName("Download file " + this.path);
-
- BufferedOutputStream ostream = null;
- DataInputStream istream = null;
-
- try {
- istream = getDFS().open(this.path);
- ostream = new BufferedOutputStream(new FileOutputStream(file));
-
- int bytes;
- byte[] buffer = new byte[taskSize];
-
- while ((bytes = istream.read(buffer)) >= 0) {
- if (monitor.isCanceled())
- return;
- ostream.write(buffer, 0, bytes);
- monitor.worked(1);
- }
-
- } catch (Exception e) {
- throw new InvocationTargetException(e);
-
- } finally {
- // Clean all opened resources
- if (istream != null) {
- try {
- istream.close();
- } catch (IOException e) {
- e.printStackTrace();
- // nothing we can do here
- }
- }
- try {
- ostream.close();
- } catch (IOException e) {
- e.printStackTrace();
- // nothing we can do here
- }
- }
- }
-
- /**
- * Upload a local file to this file on the distributed file system
- *
- * @param monitor
- * @param file
- */
- public void upload(IProgressMonitor monitor, File file) {
-
- final int taskSize = 1024;
-
- monitor.setTaskName("Upload file " + this.path);
-
- BufferedInputStream istream = null;
- DataOutputStream ostream = null;
-
- try {
- istream = new BufferedInputStream(new FileInputStream(file));
- ostream = getDFS().create(this.path);
-
- int bytes;
- byte[] buffer = new byte[taskSize];
-
- while ((bytes = istream.read(buffer)) >= 0) {
- if (monitor.isCanceled())
- return;
- ostream.write(buffer, 0, bytes);
- monitor.worked(1);
- }
-
- } catch (Exception e) {
- ErrorMessageDialog.display(String.format(
- "Unable to uploade file %s to %s", file, this.path), e
- .getLocalizedMessage());
-
- } finally {
- try {
- if (istream != null)
- istream.close();
- } catch (IOException e) {
- e.printStackTrace();
- // nothing we can do here
- }
- try {
- if (ostream != null)
- ostream.close();
- } catch (IOException e) {
- e.printStackTrace();
- // nothing we can do here
- }
- }
- }
-
- /* @inheritDoc */
- @Override
- public void refresh() {
- getParent().refresh();
- }
-
- /* @inheritDoc */
- @Override
- public int computeDownloadWork() {
- return 1 + (int) (this.length / 1024);
- }
-
- /**
- * Creates an adapter for the file to open it in the Editor
- *
- * @return the IStorage
- */
- public IStorage getIStorage() {
- return new IStorageAdapter();
- }
-
- /**
- * IStorage adapter to open the file in the Editor
- */
- private class IStorageAdapter extends PlatformObject implements IStorage {
-
- /* @inheritDoc */
- public InputStream getContents() throws CoreException {
- try {
- return DFSFile.this.open();
-
- } catch (IOException ioe) {
- throw new CoreException(new Status(Status.ERROR,
- Activator.PLUGIN_ID, 0, "Unable to open file \""
- + DFSFile.this.path + "\"", ioe));
- }
- }
-
- /* @inheritDoc */
- public IPath getFullPath() {
- return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
- }
-
- /* @inheritDoc */
- public String getName() {
- return DFSFile.this.path.getName();
- }
-
- /* @inheritDoc */
- public boolean isReadOnly() {
- return true;
- }
-
- }
-
- /*
- * Implementation of DFSContent
- */
-
- /* @inheritDoc */
- public DFSContent[] getChildren() {
- return null;
- }
-
- /* @inheritDoc */
- public boolean hasChildren() {
- return false;
- }
-
-}
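
The size formatting in toDetailedString() scales the byte count by 1024 until
it fits the largest unit. The same loop as a standalone helper (hypothetical
class, identical unit labels, with an explicit bound so the unit index never
overruns the array):

class SizeFormat {
  private static final String[] UNITS = { "b", "Kb", "Mb", "Gb", "Tb" };

  static String humanReadable(long length) {
    int unit = 0;
    double l = length;
    while ((l >= 1024.0) && (unit < UNITS.length - 1)) { // never pass "Tb"
      unit += 1;
      l /= 1024.0;
    }
    return String.format("%.1f %s", l, UNITS[unit]);
  }
}
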
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java
deleted file mode 100644
index 7dc72a7bf3b..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.IStatus;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.core.runtime.jobs.Job;
-import org.eclipse.jface.dialogs.MessageDialog;
-
-/**
- * Local representation of a folder in the DFS.
- *
- * The constructor creates an empty representation of the folder; its
- * children are filled in asynchronously by a background job.
- */
-public class DFSFolder extends DFSPath implements DFSContent {
-
- static Logger log = Logger.getLogger(DFSFolder.class.getName());
-
- private DFSContent[] children;
-
- protected DFSFolder(DFSContentProvider provider, HadoopServer location)
- throws IOException {
-
- super(provider, location);
- }
-
- private DFSFolder(DFSPath parent, Path path) {
- super(parent, path);
- }
-
- protected void loadDFSFolderChildren() throws IOException {
-    List<DFSContent> list = new ArrayList<DFSContent>();
-
- for (FileStatus status : getDFS().listStatus(this.getPath())) {
- if (status.isDir()) {
- list.add(new DFSFolder(this, status.getPath()));
- } else {
- list.add(new DFSFile(this, status.getPath()));
- }
- }
-
- this.children = list.toArray(new DFSContent[list.size()]);
- }
-
- /**
- * Upload the given file or directory into this DfsFolder
- *
- * @param file
- * @throws IOException
- */
- public void upload(IProgressMonitor monitor, final File file)
- throws IOException {
-
- if (file.isDirectory()) {
- Path filePath = new Path(this.path, file.getName());
- getDFS().mkdirs(filePath);
- DFSFolder newFolder = new DFSFolder(this, filePath);
- monitor.worked(1);
- for (File child : file.listFiles()) {
- if (monitor.isCanceled())
- return;
- newFolder.upload(monitor, child);
- }
-
- } else if (file.isFile()) {
- Path filePath = new Path(this.path, file.getName());
- DFSFile newFile = new DFSFile(this, filePath, file, monitor);
-
- } else {
- // XXX don't know what the file is?
- }
- }
-
- /* @inheritDoc */
- @Override
- public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
- if (!dir.exists())
- dir.mkdirs();
-
- if (!dir.isDirectory()) {
- MessageDialog.openError(null, "Download to local file system",
- "Invalid directory location: \"" + dir + "\"");
- return;
- }
-
- File dfsPath = new File(this.getPath().toString());
- File destination = new File(dir, dfsPath.getName());
-
- if (!destination.exists()) {
- if (!destination.mkdir()) {
- MessageDialog.openError(null, "Download to local directory",
- "Unable to create directory " + destination.getAbsolutePath());
- return;
- }
- }
-
- // Download all DfsPath children
- for (Object childObj : getChildren()) {
- if (childObj instanceof DFSPath) {
- ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
- monitor.worked(1);
- }
- }
- }
-
- /* @inheritDoc */
- @Override
- public int computeDownloadWork() {
- int work = 1;
- for (DFSContent child : getChildren()) {
- if (child instanceof DFSPath)
- work += ((DFSPath) child).computeDownloadWork();
- }
-
- return work;
- }
-
- /**
- * Create a new sub directory into this directory
- *
- * @param folderName
- */
- public void mkdir(String folderName) {
- try {
- getDFS().mkdirs(new Path(this.path, folderName));
- } catch (IOException ioe) {
- ioe.printStackTrace();
- }
- doRefresh();
- }
-
- /*
- * Implementation of DFSContent
- */
-
- /* @inheritDoc */
- public boolean hasChildren() {
- if (this.children == null)
- return true;
- else
- return (this.children.length > 0);
- }
-
- /* @inheritDoc */
- public DFSContent[] getChildren() {
- if (children == null) {
- new Job("Connecting to DFS " + location) {
- @Override
- protected IStatus run(IProgressMonitor monitor) {
- try {
- loadDFSFolderChildren();
- return Status.OK_STATUS;
-
- } catch (IOException ioe) {
- children =
- new DFSContent[] { new DFSMessage("Error: "
- + ioe.getLocalizedMessage()) };
- return Status.CANCEL_STATUS;
-
- } finally {
- // Under all circumstances, update the UI
- provider.refresh(DFSFolder.this);
- }
- }
- }.schedule();
-
- return new DFSContent[] { new DFSMessage("Listing folder content...") };
- }
- return this.children;
- }
-
- /* @inheritDoc */
- @Override
- public void refresh() {
- this.children = null;
- this.doRefresh();
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return String.format("%s (%s)", super.toString(),
- this.getChildren().length);
- }
-
-}
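
DFSFolder.getChildren() illustrates the plugin's lazy-loading idiom (DFSLocation,
next below, uses the same one): answer immediately with a DFSMessage
placeholder, fetch the real children in an Eclipse Job off the UI thread, then
push a refresh. A hedged distillation of that idiom (class and method names are
illustrative, not the deleted plugin's API):

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

abstract class LazyDFSNode implements DFSContent {
  private volatile DFSContent[] children;

  public DFSContent[] getChildren() {
    if (children == null) {
      new Job("Listing folder content...") {
        @Override
        protected IStatus run(IProgressMonitor monitor) {
          children = load(); // blocking call, safely off the UI thread
          repaint();         // e.g. provider.refresh(this) in the code above
          return Status.OK_STATUS;
        }
      }.schedule();
      return new DFSContent[] { new DFSMessage("Listing folder content...") };
    }
    return children;
  }

  abstract DFSContent[] load(); // the blocking fetch, e.g. listStatus()

  abstract void repaint();      // push the update back to the viewer
}
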
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java
deleted file mode 100644
index 31c8fb30e15..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.io.IOException;
-
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.IStatus;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.core.runtime.jobs.Job;
-
-/**
- * DFS Content representation of a HDFS location
- */
-public class DFSLocation implements DFSContent {
-
- private final DFSContentProvider provider;
-
- private final HadoopServer location;
-
- private DFSContent rootFolder = null;
-
- DFSLocation(DFSContentProvider provider, HadoopServer server) {
- this.provider = provider;
- this.location = server;
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return this.location.getLocationName();
- }
-
- /*
- * Implementation of DFSContent
- */
-
- /* @inheritDoc */
- public DFSContent[] getChildren() {
- if (this.rootFolder == null) {
- /*
- * DfsFolder constructor might block as it contacts the NameNode: work
- * asynchronously here or this will potentially freeze the UI
- */
- new Job("Connecting to DFS " + location) {
- @Override
- protected IStatus run(IProgressMonitor monitor) {
- try {
- rootFolder = new DFSFolder(provider, location);
- return Status.OK_STATUS;
-
- } catch (IOException ioe) {
- rootFolder =
- new DFSMessage("Error: " + ioe.getLocalizedMessage());
- return Status.CANCEL_STATUS;
-
- } finally {
- // Under all circumstances, update the UI
- provider.refresh(DFSLocation.this);
- }
- }
- }.schedule();
-
- return new DFSContent[] { new DFSMessage("Connecting to DFS "
- + toString()) };
- }
- return new DFSContent[] { this.rootFolder };
- }
-
- /* @inheritDoc */
- public boolean hasChildren() {
- return true;
- }
-
- /* @inheritDoc */
- public void refresh() {
- this.rootFolder = null;
- this.provider.refresh(this);
- }
-
- /*
- * Actions
- */
-
- /**
- * Refresh the location using a new connection
- */
- public void reconnect() {
- this.refresh();
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java
deleted file mode 100644
index 9d9a60909eb..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
-import org.apache.hadoop.eclipse.servers.ServerRegistry;
-import org.apache.hadoop.fs.FileSystem;
-
-/**
- * Representation of the root element containing all DFS servers. This
- * content registers an observer on Hadoop servers so as to update itself
- * when servers are updated.
- */
-public class DFSLocationsRoot implements DFSContent, IHadoopServerListener {
-
- /**
- *
- */
- private final DFSContentProvider provider;
-
-  private Map<HadoopServer, DFSLocation> map =
-      new HashMap<HadoopServer, DFSLocation>();
-
- /**
- * Register a listeners to track DFS locations updates
- *
- * @param provider the content provider this content is the root of
- */
- DFSLocationsRoot(DFSContentProvider provider) {
- this.provider = provider;
- ServerRegistry.getInstance().addListener(this);
- this.refresh();
- }
-
- /*
- * Implementation of IHadoopServerListener
- */
-
- /* @inheritDoc */
- public synchronized void serverChanged(final HadoopServer location,
- final int type) {
-
- switch (type) {
- case ServerRegistry.SERVER_STATE_CHANGED: {
- this.provider.refresh(map.get(location));
- break;
- }
-
- case ServerRegistry.SERVER_ADDED: {
- DFSLocation dfsLoc = new DFSLocation(provider, location);
- map.put(location, dfsLoc);
- this.provider.refresh(this);
- break;
- }
-
- case ServerRegistry.SERVER_REMOVED: {
- map.remove(location);
- this.provider.refresh(this);
- break;
- }
- }
- }
-
- /**
- * Recompute the map of Hadoop locations
- */
- private synchronized void reloadLocations() {
- map.clear();
- for (HadoopServer location : ServerRegistry.getInstance().getServers())
- map.put(location, new DFSLocation(provider, location));
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return "DFS Locations";
- }
-
- /*
- * Implementation of DFSContent
- */
-
- /* @inheritDoc */
- public synchronized DFSContent[] getChildren() {
- return this.map.values().toArray(new DFSContent[this.map.size()]);
- }
-
- /* @inheritDoc */
- public boolean hasChildren() {
- return (this.map.size() > 0);
- }
-
- /* @inheritDoc */
- public void refresh() {
- reloadLocations();
- this.provider.refresh(this);
- }
-
- /*
- * Actions
- */
-
- public void disconnect() {
- Thread closeThread = new Thread() {
- /* @inheritDoc */
- @Override
- public void run() {
- try {
- System.out.printf("Closing all opened File Systems...\n");
- FileSystem.closeAll();
- System.out.printf("File Systems closed\n");
-
- } catch (IOException ioe) {
- ioe.printStackTrace();
- }
- }
- };
-
- // Wait 5 seconds for the connections to be closed
- closeThread.start();
- try {
- closeThread.join(5000);
-
- } catch (InterruptedException ie) {
- // Ignore
- }
- }
-
-}
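
The disconnect() method above bounds the potentially slow FileSystem.closeAll()
call by joining the worker thread for at most five seconds. The same
bounded-wait idiom in generic form (a sketch; names are illustrative, and it
restores the interrupt status instead of ignoring it):

class BoundedCleanup {
  static void runWithTimeout(Runnable cleanup, long timeoutMillis) {
    Thread worker = new Thread(cleanup, "cleanup");
    worker.start();
    try {
      worker.join(timeoutMillis); // returns early if cleanup finishes first
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt(); // preserve the interrupt status
    }
  }
}
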
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java
deleted file mode 100644
index ce83b9aa260..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-/**
- * DFS Content that displays a message.
- */
-class DFSMessage implements DFSContent {
-
- private String message;
-
- DFSMessage(String message) {
- this.message = message;
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return this.message;
- }
-
- /*
- * Implementation of DFSContent
- */
-
- /* @inheritDoc */
- public DFSContent[] getChildren() {
- return null;
- }
-
- /* @inheritDoc */
- public boolean hasChildren() {
- return false;
- }
-
- /* @inheritDoc */
- public void refresh() {
- // Nothing to do
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java
deleted file mode 100644
index 0abd53815f7..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.dfs;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.eclipse.ErrorMessageDialog;
-import org.apache.hadoop.eclipse.server.ConfProp;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.jface.dialogs.MessageDialog;
-
-/**
- * DFS Path handling for DFS
- */
-public abstract class DFSPath implements DFSContent {
-
- protected final DFSContentProvider provider;
-
- protected HadoopServer location;
-
- private DistributedFileSystem dfs = null;
-
- protected final Path path;
-
- protected final DFSPath parent;
-
- /**
- * For debugging purpose
- */
- static Logger log = Logger.getLogger(DFSPath.class.getName());
-
- /**
- * Create a path representation for the given location in the given viewer
- *
- * @param location
- * @param path
- * @param viewer
- */
- public DFSPath(DFSContentProvider provider, HadoopServer location)
- throws IOException {
-
- this.provider = provider;
- this.location = location;
- this.path = new Path("/");
- this.parent = null;
- }
-
- /**
- * Create a sub-path representation for the given parent path
- *
- * @param parent
- * @param path
- */
- protected DFSPath(DFSPath parent, Path path) {
- this.provider = parent.provider;
- this.location = parent.location;
- this.dfs = parent.dfs;
- this.parent = parent;
- this.path = path;
- }
-
- protected void dispose() {
- // Free the DFS connection
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- if (path.equals("/")) {
- return location.getConfProp(ConfProp.FS_DEFAULT_URI);
-
- } else {
- return this.path.getName();
- }
- }
-
- /**
- * Does a recursive delete of the remote directory tree at this node.
- */
- public void delete() {
- try {
- getDFS().delete(this.path, true);
-
- } catch (IOException e) {
- e.printStackTrace();
- MessageDialog.openWarning(null, "Delete file",
- "Unable to delete file \"" + this.path + "\"\n" + e);
- }
- }
-
- public DFSPath getParent() {
- return parent;
- }
-
- public abstract void refresh();
-
- /**
- * Refresh the UI element for this content
- */
- public void doRefresh() {
- provider.refresh(this);
- }
-
- /**
- * Copy the DfsPath to the given local directory
- *
- * @param directory the local directory
- */
- public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
- File dir);
-
- public Path getPath() {
- return this.path;
- }
-
- /**
- * Gets a connection to the DFS
- *
- * @return a connection to the DFS
- * @throws IOException
- */
- DistributedFileSystem getDFS() throws IOException {
- if (this.dfs == null) {
- FileSystem fs = location.getDFS();
- if (!(fs instanceof DistributedFileSystem)) {
- ErrorMessageDialog.display("DFS Browser",
- "The DFS Browser cannot browse anything else "
- + "but a Distributed File System!");
- throw new IOException("DFS Browser expects a DistributedFileSystem!");
- }
- this.dfs = (DistributedFileSystem) fs;
- }
- return this.dfs;
- }
-
- public abstract int computeDownloadWork();
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java
deleted file mode 100644
index 3297c6447ad..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.launch;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.eclipse.servers.RunOnHadoopWizard;
-import org.eclipse.core.resources.IFile;
-import org.eclipse.core.resources.IResource;
-import org.eclipse.core.runtime.CoreException;
-import org.eclipse.debug.core.ILaunchConfiguration;
-import org.eclipse.debug.core.ILaunchConfigurationType;
-import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
-import org.eclipse.jdt.core.IJavaProject;
-import org.eclipse.jdt.core.IType;
-import org.eclipse.jdt.core.JavaCore;
-import org.eclipse.jdt.internal.debug.ui.launcher.JavaApplicationLaunchShortcut;
-import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
-import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
-import org.eclipse.jdt.launching.JavaRuntime;
-import org.eclipse.jface.wizard.IWizard;
-import org.eclipse.jface.wizard.WizardDialog;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.swt.widgets.Shell;
-
-/**
- * Add a shortcut "Run on Hadoop" to the Run menu
- */
-
-public class HadoopApplicationLaunchShortcut extends
- JavaApplicationLaunchShortcut {
-
- static Logger log =
- Logger.getLogger(HadoopApplicationLaunchShortcut.class.getName());
-
- // private ActionDelegate delegate = new RunOnHadoopActionDelegate();
-
- public HadoopApplicationLaunchShortcut() {
- }
-
- /* @inheritDoc */
- @Override
- protected ILaunchConfiguration findLaunchConfiguration(IType type,
- ILaunchConfigurationType configType) {
-
- // Find an existing launch configuration or create one (standard way)
- ILaunchConfiguration iConf =
- super.findLaunchConfiguration(type, configType);
-
- ILaunchConfigurationWorkingCopy iConfWC;
- try {
- /*
- * Tune the default launch configuration: set up the run-time
- * classpath manually
- */
- iConfWC = iConf.getWorkingCopy();
-
- iConfWC.setAttribute(
- IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
-
- List<String> classPath = new ArrayList<String>();
- IResource resource = type.getResource();
- IJavaProject project =
- (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
- IRuntimeClasspathEntry cpEntry =
- JavaRuntime.newDefaultProjectClasspathEntry(project);
- classPath.add(0, cpEntry.getMemento());
-
- iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
- classPath);
-
- } catch (CoreException e) {
- e.printStackTrace();
- // FIXME Error dialog
- return null;
- }
-
- /*
- * Update the selected configuration with a specific Hadoop location
- * target
- */
- IResource resource = type.getResource();
- if (!(resource instanceof IFile))
- return null;
- RunOnHadoopWizard wizard =
- new RunOnHadoopWizard((IFile) resource, iConfWC);
- WizardDialog dialog =
- new WizardDialog(Display.getDefault().getActiveShell(), wizard);
-
- dialog.create();
- dialog.setBlockOnOpen(true);
- if (dialog.open() != WizardDialog.OK)
- return null;
-
- try {
- iConfWC.doSave();
-
- } catch (CoreException e) {
- e.printStackTrace();
- // FIXME Error dialog
- return null;
- }
-
- return iConfWC;
- }
-
- /**
- * Formerly used to run the RunOnHadoopWizard and provide it with a
- * ProgressMonitor
- */
- static class Dialog extends WizardDialog {
- public Dialog(Shell parentShell, IWizard newWizard) {
- super(parentShell, newWizard);
- }
-
- @Override
- public void create() {
- super.create();
-
- ((RunOnHadoopWizard) getWizard())
- .setProgressMonitor(getProgressMonitor());
- }
- }
-}
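
For context, the classpath manipulation in findLaunchConfiguration() above boils down to two JDT launch-configuration attributes. A minimal sketch of that mechanism, assuming an existing working copy and Java project (the helper class and method names here are invented for illustration):

```java
// Sketch: supplying a manual runtime classpath on a launch configuration,
// as the deleted shortcut does. Only the two JDT attributes are the real
// mechanism; the helper itself is hypothetical.
import java.util.ArrayList;
import java.util.List;

import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.JavaRuntime;

class ManualClasspathSketch {
  static void useProjectClasspath(ILaunchConfigurationWorkingCopy wc,
      IJavaProject project) throws CoreException {
    // Disable JDT's automatic classpath computation...
    wc.setAttribute(
        IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
    // ...and supply the project's default runtime classpath as a memento.
    IRuntimeClasspathEntry entry =
        JavaRuntime.newDefaultProjectClasspathEntry(project);
    List<String> classPath = new ArrayList<String>();
    classPath.add(entry.getMemento());
    wc.setAttribute(
        IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, classPath);
  }
}
```
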
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java
deleted file mode 100644
index 66db5d2d3cf..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.launch;
-
-import org.eclipse.core.runtime.CoreException;
-import org.eclipse.debug.core.ILaunchConfiguration;
-import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
-import org.eclipse.debug.ui.AbstractLaunchConfigurationTab;
-import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup;
-import org.eclipse.debug.ui.CommonTab;
-import org.eclipse.debug.ui.ILaunchConfigurationDialog;
-import org.eclipse.debug.ui.ILaunchConfigurationTab;
-import org.eclipse.jdt.core.IType;
-import org.eclipse.jdt.core.JavaModelException;
-import org.eclipse.jdt.core.dom.AST;
-import org.eclipse.jdt.core.search.SearchEngine;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab;
-import org.eclipse.jdt.ui.IJavaElementSearchConstants;
-import org.eclipse.jdt.ui.JavaUI;
-import org.eclipse.jface.dialogs.ProgressMonitorDialog;
-import org.eclipse.jface.window.Window;
-import org.eclipse.swt.SWT;
-import org.eclipse.swt.layout.GridData;
-import org.eclipse.swt.layout.GridLayout;
-import org.eclipse.swt.widgets.Button;
-import org.eclipse.swt.widgets.Composite;
-import org.eclipse.swt.widgets.Event;
-import org.eclipse.swt.widgets.Label;
-import org.eclipse.swt.widgets.Listener;
-import org.eclipse.swt.widgets.Text;
-import org.eclipse.ui.dialogs.SelectionDialog;
-
-/**
- *
- * Handler for Local MapReduce job launches
- *
- * TODO(jz) this may not be needed as we almost always deploy to a remote
- * server rather than locally; where we do run locally we may just be able
- * to exec scripts without going through Java
- *
- */
-public class LocalMapReduceLaunchTabGroup extends
- AbstractLaunchConfigurationTabGroup {
-
- public LocalMapReduceLaunchTabGroup() {
- // TODO Auto-generated constructor stub
- }
-
- public void createTabs(ILaunchConfigurationDialog dialog, String mode) {
- setTabs(new ILaunchConfigurationTab[] { new MapReduceLaunchTab(),
- new JavaArgumentsTab(), new JavaJRETab(), new JavaClasspathTab(),
- new CommonTab() });
- }
-
- public static class MapReduceLaunchTab extends AbstractLaunchConfigurationTab {
- private Text combinerClass;
-
- private Text reducerClass;
-
- private Text mapperClass;
-
- @Override
- public boolean canSave() {
- return true;
- }
-
- @Override
- public boolean isValid(ILaunchConfiguration launchConfig) {
- // todo: only if all classes are of proper types
- return true;
- }
-
- public void createControl(final Composite parent) {
- Composite panel = new Composite(parent, SWT.NONE);
- GridLayout layout = new GridLayout(3, false);
- panel.setLayout(layout);
-
- Label mapperLabel = new Label(panel, SWT.NONE);
- mapperLabel.setText("Mapper");
- mapperClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
- createRow(parent, panel, mapperClass);
-
- Label reducerLabel = new Label(panel, SWT.NONE);
- reducerLabel.setText("Reducer");
- reducerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
- createRow(parent, panel, reducerClass);
-
- Label combinerLabel = new Label(panel, SWT.NONE);
- combinerLabel.setText("Combiner");
- combinerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
- createRow(parent, panel, combinerClass);
-
- panel.pack();
- setControl(panel);
- }
-
- private void createRow(final Composite parent, Composite panel,
- final Text text) {
- text.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
- Button button = new Button(panel, SWT.BORDER);
- button.setText("Browse...");
- button.addListener(SWT.Selection, new Listener() {
- public void handleEvent(Event arg0) {
- try {
- AST ast = AST.newAST(3);
-
- SelectionDialog dialog = JavaUI.createTypeDialog(parent.getShell(),
- new ProgressMonitorDialog(parent.getShell()), SearchEngine
- .createWorkspaceScope(),
- IJavaElementSearchConstants.CONSIDER_CLASSES, false);
- dialog.setMessage("Select Mapper type (implementing )");
- dialog.setBlockOnOpen(true);
- dialog.setTitle("Select Mapper Type");
- dialog.open();
-
- if ((dialog.getReturnCode() == Window.OK)
- && (dialog.getResult().length > 0)) {
- IType type = (IType) dialog.getResult()[0];
- text.setText(type.getFullyQualifiedName());
- setDirty(true);
- }
- } catch (JavaModelException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
- });
- }
-
- public String getName() {
- return "Hadoop";
- }
-
- public void initializeFrom(ILaunchConfiguration configuration) {
- try {
- mapperClass.setText(configuration.getAttribute(
- "org.apache.hadoop.eclipse.launch.mapper", ""));
- reducerClass.setText(configuration.getAttribute(
- "org.apache.hadoop.eclipse.launch.reducer", ""));
- combinerClass.setText(configuration.getAttribute(
- "org.apache.hadoop.eclipse.launch.combiner", ""));
- } catch (CoreException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- setErrorMessage(e.getMessage());
- }
- }
-
- public void performApply(ILaunchConfigurationWorkingCopy configuration) {
- configuration.setAttribute("org.apache.hadoop.eclipse.launch.mapper",
- mapperClass.getText());
- configuration.setAttribute(
- "org.apache.hadoop.eclipse.launch.reducer", reducerClass
- .getText());
- configuration.setAttribute(
- "org.apache.hadoop.eclipse.launch.combiner", combinerClass
- .getText());
- }
-
- public void setDefaults(ILaunchConfigurationWorkingCopy configuration) {
-
- }
- }
-}
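
The tab above persists its fields as plain string attributes on the launch configuration. A minimal sketch of that round-trip (the helper class is hypothetical; the attribute key is the one used by the tab):

```java
// Hypothetical helper showing the attribute round-trip the tab relies on:
// values live as string attributes under plug-in-qualified keys.
import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;

class MapperAttribute {
  static final String KEY = "org.apache.hadoop.eclipse.launch.mapper";

  // Read the stored mapper class name, defaulting to "" when unset
  static String read(ILaunchConfiguration config) throws CoreException {
    return config.getAttribute(KEY, "");
  }

  // Store the mapper class name on a working copy (the caller saves it)
  static void write(ILaunchConfigurationWorkingCopy config, String mapper) {
    config.setAttribute(KEY, mapper);
  }
}
```
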
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java
deleted file mode 100644
index 46df4491056..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.launch;
-
-import org.eclipse.core.runtime.jobs.ISchedulingRule;
-
-public class MutexRule implements ISchedulingRule {
- private final String id;
-
- public MutexRule(String id) {
- this.id = id;
- }
-
- public boolean contains(ISchedulingRule rule) {
- return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
- }
-
- public boolean isConflicting(ISchedulingRule rule) {
- return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
- }
-}
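
Two MutexRule instances with equal ids conflict, so Eclipse jobs tagged with them run one at a time. A usage sketch (the rule id and job body are illustrative only):

```java
// Illustrative use of MutexRule: jobs sharing an equal rule id are
// serialized by the Eclipse job manager.
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

class MutexRuleUsage {
  static void scheduleExclusive() {
    Job upload = new Job("DFS upload") {
      @Override
      protected IStatus run(IProgressMonitor monitor) {
        // ... do the exclusive work ...
        return Status.OK_STATUS;
      }
    };
    // Any other job whose rule was built from the same id will wait.
    upload.setRule(new MutexRule("hadoop.dfs.upload"));
    upload.schedule();
  }
}
```
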
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java
deleted file mode 100644
index 047ba179a61..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.launch;
-
-import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup;
-import org.eclipse.debug.ui.CommonTab;
-import org.eclipse.debug.ui.ILaunchConfigurationDialog;
-import org.eclipse.debug.ui.ILaunchConfigurationTab;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab;
-import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab;
-
-/**
- * Create the tab group for the dialog window for starting a Hadoop job.
- */
-
-public class StartHadoopLaunchTabGroup extends
- AbstractLaunchConfigurationTabGroup {
-
- public StartHadoopLaunchTabGroup() {
- // TODO Auto-generated constructor stub
- }
-
- /**
- * TODO(jz) consider the appropriate tabs for this case
- */
- public void createTabs(ILaunchConfigurationDialog dialog, String mode) {
- setTabs(new ILaunchConfigurationTab[] { new JavaArgumentsTab(),
- new JavaJRETab(), new JavaClasspathTab(), new CommonTab() });
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java
deleted file mode 100644
index cef50a3475b..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.eclipse.preferences;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.eclipse.jface.preference.DirectoryFieldEditor;
-import org.eclipse.jface.preference.FieldEditorPreferencePage;
-import org.eclipse.ui.IWorkbench;
-import org.eclipse.ui.IWorkbenchPreferencePage;
-
-/**
- * This class represents a preference page that is contributed to the
- * Preferences dialog. By sub-classing FieldEditorPreferencePage,
- * we can use the field support built into JFace that allows us to create a
- * page that is small and knows how to save, restore and apply itself.
- *
- *
- * This page is used to modify preferences only. They are stored in the
- * preference store that belongs to the main plug-in class. That way,
- * preferences can be accessed directly via the preference store.
- */
-
-public class MapReducePreferencePage extends FieldEditorPreferencePage
- implements IWorkbenchPreferencePage {
-
- public MapReducePreferencePage() {
- super(GRID);
- setPreferenceStore(Activator.getDefault().getPreferenceStore());
- setTitle("Hadoop Map/Reduce Tools");
- // setDescription("Hadoop Map/Reduce Preferences");
- }
-
- /**
- * Creates the field editors. Field editors are abstractions of the common
- * GUI blocks needed to manipulate various types of preferences. Each field
- * editor knows how to save and restore itself.
- */
- @Override
- public void createFieldEditors() {
- addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
- "&Hadoop installation directory:", getFieldEditorParent()));
-
- }
-
- /* @inheritDoc */
- public void init(IWorkbench workbench) {
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java
deleted file mode 100644
index 74641bb28a7..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.preferences;
-
-/**
- * Constant definitions for plug-in preferences
- */
-public class PreferenceConstants {
-
- public static final String P_PATH = "pathPreference";
-
- // public static final String P_BOOLEAN = "booleanPreference";
- //
- // public static final String P_CHOICE = "choicePreference";
- //
- // public static final String P_STRING = "stringPreference";
- //
-}
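
Elsewhere in the plug-in the stored path would be read back from the same store. A minimal sketch, assuming the standard AbstractUIPlugin-backed preference store and same-package access to PreferenceConstants (the lookup class is invented):

```java
// Sketch: reading the preference defined above back from the plug-in's
// preference store.
import org.apache.hadoop.eclipse.Activator;
import org.eclipse.jface.preference.IPreferenceStore;

class HadoopDirLookup {
  static String hadoopInstallDir() {
    IPreferenceStore store = Activator.getDefault().getPreferenceStore();
    return store.getString(PreferenceConstants.P_PATH); // "" if unset
  }
}
```
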
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java
deleted file mode 100644
index 444050a9920..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.preferences;
-
-import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
-
-/**
- * Class used to initialize default preference values.
- */
-public class PreferenceInitializer extends AbstractPreferenceInitializer {
-
- /* @inheritDoc */
- @Override
- public void initializeDefaultPreferences() {
- }
-
-}
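
The deleted initializer leaves initializeDefaultPreferences() empty. A typical non-empty version would seed store defaults like this (the empty-string default for P_PATH is an assumption, not the plug-in's actual behavior):

```java
// Hypothetical filled-in initializer: register a default for P_PATH so
// getString() has a well-defined value before the user sets anything.
import org.apache.hadoop.eclipse.Activator;
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
import org.eclipse.jface.preference.IPreferenceStore;

public class PreferenceInitializerSketch extends AbstractPreferenceInitializer {
  @Override
  public void initializeDefaultPreferences() {
    IPreferenceStore store = Activator.getDefault().getPreferenceStore();
    store.setDefault(PreferenceConstants.P_PATH, ""); // assumed default
  }
}
```
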
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java
deleted file mode 100644
index 7c84ff09c33..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-
-public enum ConfProp {
- /**
- * Property name for the Hadoop location name
- */
- PI_LOCATION_NAME(true, "location.name", "New Hadoop location"),
-
- /**
- * Property name for the master host name (the Job tracker)
- */
- PI_JOB_TRACKER_HOST(true, "jobtracker.host", "localhost"),
-
- /**
- * Property name for the DFS master host name (the Name node)
- */
- PI_NAME_NODE_HOST(true, "namenode.host", "localhost"),
-
- /**
- * Property name for the installation directory on the master node
- */
- // PI_INSTALL_DIR(true, "install.dir", "/dir/hadoop-version/"),
- /**
- * User name to use for Hadoop operations
- */
- PI_USER_NAME(true, "user.name", System.getProperty("user.name",
- "who are you?")),
-
- /**
- * Property name for SOCKS proxy activation
- */
- PI_SOCKS_PROXY_ENABLE(true, "socks.proxy.enable", "no"),
-
- /**
- * Property name for the SOCKS proxy host
- */
- PI_SOCKS_PROXY_HOST(true, "socks.proxy.host", "host"),
-
- /**
- * Property name for the SOCKS proxy port
- */
- PI_SOCKS_PROXY_PORT(true, "socks.proxy.port", "1080"),
-
- /**
- * TCP port number for the name node
- */
- PI_NAME_NODE_PORT(true, "namenode.port", "50040"),
-
- /**
- * TCP port number for the job tracker
- */
- PI_JOB_TRACKER_PORT(true, "jobtracker.port", "50020"),
-
- /**
- * Are the Map/Reduce and the Distributed FS masters hosted on the same
- * machine?
- */
- PI_COLOCATE_MASTERS(true, "masters.colocate", "yes"),
-
- /**
- * Property name for naming the job tracker (URI). This property is related
- * to {@link #PI_JOB_TRACKER_HOST}
- */
- JOB_TRACKER_URI(false, "mapred.job.tracker", "localhost:50020"),
-
- /**
- * Property name for naming the default file system (URI).
- */
- FS_DEFAULT_URI(false, "fs.default.name", "hdfs://localhost:50040/"),
-
- /**
- * Property name for the default socket factory:
- */
- SOCKET_FACTORY_DEFAULT(false, "hadoop.rpc.socket.factory.class.default",
- "org.apache.hadoop.net.StandardSocketFactory"),
-
- /**
- * Property name for the SOCKS server URI.
- */
- SOCKS_SERVER(false, "hadoop.socks.server", "host:1080"),
-
- ;
-
- /**
- * Map from property name to ConfProp
- */
- private static Map<String, ConfProp> map;
-
- private static synchronized void registerProperty(String name,
- ConfProp prop) {
-
- if (ConfProp.map == null)
- ConfProp.map = new HashMap<String, ConfProp>();
-
- ConfProp.map.put(name, prop);
- }
-
- public static ConfProp getByName(String propName) {
- return map.get(propName);
- }
-
- public final String name;
-
- public final String defVal;
-
- ConfProp(boolean internal, String name, String defVal) {
- if (internal)
- name = "eclipse.plug-in." + name;
- this.name = name;
- this.defVal = defVal;
-
- ConfProp.registerProperty(name, this);
- }
-
- String get(Configuration conf) {
- return conf.get(name);
- }
-
- void set(Configuration conf, String value) {
- assert value != null;
- conf.set(name, value);
- }
-
-}
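
In short, each enum constant owns a configuration key (internal plug-in keys get the eclipse.plug-in. prefix) and a default value. A small demonstration, assuming same-package access since get/set are package-private (the demo class and host name are invented):

```java
// Sketch of ConfProp in use: keys for internal properties carry the
// "eclipse.plug-in." prefix; Hadoop properties use their plain names.
import org.apache.hadoop.conf.Configuration;

class ConfPropDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    ConfProp.FS_DEFAULT_URI.set(conf, "hdfs://namenode.example.com:50040/");

    // Stored under the plain Hadoop key:
    System.out.println(conf.get("fs.default.name"));
    // Reverse lookup from key to enum constant:
    System.out.println(ConfProp.getByName("fs.default.name")); // FS_DEFAULT_URI
    // Internal plug-in property, prefixed key:
    System.out.println(ConfProp.PI_LOCATION_NAME.name); // eclipse.plug-in.location.name
  }
}
```
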
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java
deleted file mode 100644
index 8745200580c..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.RunningJob;
-
-/**
- * Representation of a running Map/Reduce job on a given location
- */
-
-public class HadoopJob {
-
- /**
- * Enum representation of a Job state
- */
- public enum JobState {
- PREPARE(JobStatus.PREP), RUNNING(JobStatus.RUNNING), FAILED(
- JobStatus.FAILED), SUCCEEDED(JobStatus.SUCCEEDED);
-
- final int state;
-
- JobState(int state) {
- this.state = state;
- }
-
- static JobState ofInt(int state) {
- switch (state) {
- case JobStatus.PREP:
- return PREPARE;
- case JobStatus.RUNNING:
- return RUNNING;
- case JobStatus.FAILED:
- return FAILED;
- case JobStatus.SUCCEEDED:
- return SUCCEEDED;
- default:
- return null;
- }
- }
- }
-
- /**
- * Location this Job runs on
- */
- private final HadoopServer location;
-
- /**
- * Unique identifier of this Job
- */
- final JobID jobId;
-
- /**
- * Status representation of a running job. This actually contains a
- * reference to a JobClient. Its methods might block.
- */
- RunningJob running;
-
- /**
- * Last polled status
- *
- * @deprecated should apparently not be used
- */
- JobStatus status;
-
- /**
- * Last polled counters
- */
- Counters counters;
-
- /**
- * Job Configuration
- */
- JobConf jobConf = null;
-
- boolean completed = false;
-
- boolean successful = false;
-
- boolean killed = false;
-
- int totalMaps;
-
- int totalReduces;
-
- int completedMaps;
-
- int completedReduces;
-
- float mapProgress;
-
- float reduceProgress;
-
- /**
- * Constructor for a Hadoop job representation
- *
- * @param location the location the job runs on
- * @param id the unique job identifier
- * @param running the RunningJob handle for the job
- * @param status the initial job status
- */
- public HadoopJob(HadoopServer location, JobID id, RunningJob running,
- JobStatus status) {
-
- this.location = location;
- this.jobId = id;
- this.running = running;
-
- loadJobFile();
-
- update(status);
- }
-
- /**
- * Try to locate and load the JobConf file for this job so as to get more
- * details on the job (number of maps and reduces)
- */
- private void loadJobFile() {
- try {
- String jobFile = getJobFile();
- FileSystem fs = location.getDFS();
- File tmp = File.createTempFile(getJobID().toString(), ".xml");
- if (FileUtil.copy(fs, new Path(jobFile), tmp, false, location
- .getConfiguration())) {
- this.jobConf = new JobConf(tmp.toString());
-
- this.totalMaps = jobConf.getNumMapTasks();
- this.totalReduces = jobConf.getNumReduceTasks();
- }
-
- } catch (IOException ioe) {
- ioe.printStackTrace();
- }
- }
-
- /* @inheritDoc */
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((jobId == null) ? 0 : jobId.hashCode());
- result = prime * result + ((location == null) ? 0 : location.hashCode());
- return result;
- }
-
- /* @inheritDoc */
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (!(obj instanceof HadoopJob))
- return false;
- final HadoopJob other = (HadoopJob) obj;
- if (jobId == null) {
- if (other.jobId != null)
- return false;
- } else if (!jobId.equals(other.jobId))
- return false;
- if (location == null) {
- if (other.location != null)
- return false;
- } else if (!location.equals(other.location))
- return false;
- return true;
- }
-
- /**
- * Get the running state of the Job (see {@link JobStatus}).
- *
- * @return the current state of the job
- */
- public JobState getState() {
- if (this.completed) {
- if (this.successful) {
- return JobState.SUCCEEDED;
- } else {
- return JobState.FAILED;
- }
- } else {
- return JobState.RUNNING;
- }
- // return JobState.ofInt(this.status.getRunState());
- }
-
- /**
- * @return the unique identifier of this job
- */
- public JobID getJobID() {
- return this.jobId;
- }
-
- /**
- * @return the location this job runs on
- */
- public HadoopServer getLocation() {
- return this.location;
- }
-
- /**
- * @return true if the job has completed
- */
- public boolean isCompleted() {
- return this.completed;
- }
-
- /**
- * @return the name of this job
- */
- public String getJobName() {
- return this.running.getJobName();
- }
-
- /**
- * @return the path of this job's job file
- */
- public String getJobFile() {
- return this.running.getJobFile();
- }
-
- /**
- * Return the tracking URL for this Job.
- *
- * @return string representation of the tracking URL for this Job
- */
- public String getTrackingURL() {
- return this.running.getTrackingURL();
- }
-
- /**
- * Returns a string representation of this job status
- *
- * @return string representation of this job status
- */
- public String getStatus() {
-
- StringBuffer s = new StringBuffer();
-
- s.append("Maps : " + completedMaps + "/" + totalMaps);
- s.append(" (" + mapProgress + ")");
- s.append(" Reduces : " + completedReduces + "/" + totalReduces);
- s.append(" (" + reduceProgress + ")");
-
- return s.toString();
- }
-
- /**
- * Update this job status according to the given JobStatus
- *
- * @param status
- */
- void update(JobStatus status) {
- this.status = status;
- try {
- this.counters = running.getCounters();
- this.completed = running.isComplete();
- this.successful = running.isSuccessful();
- this.mapProgress = running.mapProgress();
- this.reduceProgress = running.reduceProgress();
- // running.getTaskCompletionEvents(fromEvent);
-
- } catch (IOException ioe) {
- ioe.printStackTrace();
- }
-
- this.completedMaps = (int) (this.totalMaps * this.mapProgress);
- this.completedReduces = (int) (this.totalReduces * this.reduceProgress);
- }
-
- /**
- * Print this job's counters (for debugging purposes)
- */
- void printCounters() {
- System.out.printf("New Job:\n", counters);
- for (String groupName : counters.getGroupNames()) {
- Counters.Group group = counters.getGroup(groupName);
- System.out.printf("\t%s[%s]\n", groupName, group.getDisplayName());
-
- for (Counters.Counter counter : group) {
- System.out.printf("\t\t%s: %s\n", counter.getDisplayName(),
- counter.getCounter());
- }
- }
- System.out.printf("\n");
- }
-
- /**
- * Kill this job
- */
- public void kill() {
- try {
- this.running.killJob();
- this.killed = true;
-
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Print this job's status (for debugging purposes)
- */
- public void display() {
- System.out.printf("Job id=%s, name=%s\n", getJobID(), getJobName());
- System.out.printf("Configuration file: %s\n", getJobID());
- System.out.printf("Tracking URL: %s\n", getTrackingURL());
-
- System.out.printf("Completion: map: %f reduce %f\n",
- 100.0 * this.mapProgress, 100.0 * this.reduceProgress);
-
- System.out.println("Job total maps = " + totalMaps);
- System.out.println("Job completed maps = " + completedMaps);
- System.out.println("Map percentage complete = " + mapProgress);
- System.out.println("Job total reduces = " + totalReduces);
- System.out.println("Job completed reduces = " + completedReduces);
- System.out.println("Reduce percentage complete = " + reduceProgress);
- System.out.flush();
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java
deleted file mode 100644
index cf58b9c25c8..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.swt.graphics.Image;
-import org.eclipse.swt.widgets.Composite;
-import org.eclipse.ui.IEditorInput;
-import org.eclipse.ui.IEditorPart;
-import org.eclipse.ui.IEditorSite;
-import org.eclipse.ui.IPropertyListener;
-import org.eclipse.ui.IWorkbenchPartSite;
-import org.eclipse.ui.PartInitException;
-
-public class HadoopPathPage implements IEditorPart {
-
- public IEditorInput getEditorInput() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public IEditorSite getEditorSite() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void init(IEditorSite site, IEditorInput input)
- throws PartInitException {
- // TODO Auto-generated method stub
-
- }
-
- public void addPropertyListener(IPropertyListener listener) {
- // TODO Auto-generated method stub
-
- }
-
- public void createPartControl(Composite parent) {
- // TODO Auto-generated method stub
-
- }
-
- public void dispose() {
- // TODO Auto-generated method stub
-
- }
-
- public IWorkbenchPartSite getSite() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public String getTitle() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Image getTitleImage() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public String getTitleToolTip() {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void removePropertyListener(IPropertyListener listener) {
- // TODO Auto-generated method stub
-
- }
-
- public void setFocus() {
- // TODO Auto-generated method stub
-
- }
-
- public Object getAdapter(Class adapter) {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void doSave(IProgressMonitor monitor) {
- // TODO Auto-generated method stub
-
- }
-
- public void doSaveAs() {
- // TODO Auto-generated method stub
-
- }
-
- public boolean isDirty() {
- // TODO Auto-generated method stub
- return false;
- }
-
- public boolean isSaveAsAllowed() {
- // TODO Auto-generated method stub
- return false;
- }
-
- public boolean isSaveOnCloseNeeded() {
- // TODO Auto-generated method stub
- return false;
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopServer.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopServer.java
deleted file mode 100644
index 1454c428310..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopServer.java
+++ /dev/null
@@ -1,510 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.logging.Logger;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.RunningJob;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.IStatus;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.core.runtime.jobs.Job;
-import org.eclipse.swt.widgets.Display;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Text;
-import org.xml.sax.SAXException;
-
-/**
- * Representation of a Hadoop location, i.e. the master node (NameNode,
- * JobTracker).
- *
- *
- * This class does not create any SSH connection anymore. Tunneling must be
- * set up outside of Eclipse for now (using Putty or ssh -D<port>
- * <host>)
- *
- *
- * TODO:
- * - Disable the updater if a location becomes unreachable or fails for
- * too long
- * - Stop the updater on location's disposal/removal
- */
-
-public class HadoopServer {
-
- /**
- * Frequency of location status observations expressed as the delay in ms
- * between each observation
- *
- * TODO Add a preference parameter for this
- */
- protected static final long STATUS_OBSERVATION_DELAY = 1500;
-
- /**
- * Background job that periodically polls the location for the status of
- * running jobs
- */
- public class LocationStatusUpdater extends Job {
-
- JobClient client = null;
-
- /**
- * Setup the updater
- */
- public LocationStatusUpdater() {
- super("Map/Reduce location status updater");
- this.setSystem(true);
- }
-
- /* @inheritDoc */
- @Override
- protected IStatus run(IProgressMonitor monitor) {
- if (client == null) {
- try {
- client = HadoopServer.this.getJobClient();
-
- } catch (IOException ioe) {
- client = null;
- return new Status(Status.ERROR, Activator.PLUGIN_ID, 0,
- "Cannot connect to the Map/Reduce location: "
- + HadoopServer.this.getLocationName(),
- ioe);
- }
- }
-
- try {
- // Set of all known existing Job IDs we want fresh info of
- Set<JobID> missingJobIds =
- new HashSet<JobID>(runningJobs.keySet());
-
- JobStatus[] jstatus = client.jobsToComplete();
- for (JobStatus status : jstatus) {
-
- JobID jobId = status.getJobID();
- missingJobIds.remove(jobId);
-
- HadoopJob hJob;
- synchronized (HadoopServer.this.runningJobs) {
- hJob = runningJobs.get(jobId);
- if (hJob == null) {
- // Unknown job, create an entry
- RunningJob running = client.getJob(jobId);
- hJob =
- new HadoopJob(HadoopServer.this, jobId, running, status);
- newJob(hJob);
- }
- }
-
- // Update HadoopJob with fresh info
- updateJob(hJob, status);
- }
-
- // Ask explicitly for fresh info for these Job IDs
- for (JobID jobId : missingJobIds) {
- HadoopJob hJob = runningJobs.get(jobId);
- if (!hJob.isCompleted())
- updateJob(hJob, null);
- }
-
- } catch (IOException ioe) {
- client = null;
- return new Status(Status.ERROR, Activator.PLUGIN_ID, 0,
- "Cannot retrieve running Jobs on location: "
- + HadoopServer.this.getLocationName(), ioe);
- }
-
- // Schedule the next observation
- schedule(STATUS_OBSERVATION_DELAY);
-
- return Status.OK_STATUS;
- }
-
- /**
- * Stores the new job and makes it available
- *
- * @param data the new job
- */
- private void newJob(final HadoopJob data) {
- runningJobs.put(data.getJobID(), data);
-
- Display.getDefault().asyncExec(new Runnable() {
- public void run() {
- fireJobAdded(data);
- }
- });
- }
-
- /**
- * Updates the status of a job
- *
- * @param job the job to update
- * @param status the fresh job status, possibly null
- */
- private void updateJob(final HadoopJob job, JobStatus status) {
- job.update(status);
-
- Display.getDefault().asyncExec(new Runnable() {
- public void run() {
- fireJobChanged(job);
- }
- });
- }
-
- }
-
- static Logger log = Logger.getLogger(HadoopServer.class.getName());
-
- /**
- * Hadoop configuration of the location. Also contains specific parameters
- * for the plug-in. These parameters are prefixed with eclipse.plug-in.*
- */
- private Configuration conf;
-
- /**
- * Jobs listeners
- */
- private Set<IJobListener> jobListeners = new HashSet<IJobListener>();
-
- /**
- * Jobs running on this location. The keys of this map are the Job IDs.
- */
- private transient Map<JobID, HadoopJob> runningJobs =
- Collections.synchronizedMap(new TreeMap<JobID, HadoopJob>());
-
- /**
- * Status updater for this location
- */
- private LocationStatusUpdater statusUpdater;
-
- // state and status - transient
- private transient String state = "";
-
- /**
- * Creates a new default Hadoop location
- */
- public HadoopServer() {
- this.conf = new Configuration();
- this.addPluginConfigDefaultProperties();
- }
-
- /**
- * Creates a location from a file
- *
- * @param file the XML file to load the location from
- * @throws IOException
- * @throws SAXException
- * @throws ParserConfigurationException
- */
- public HadoopServer(File file) throws ParserConfigurationException,
- SAXException, IOException {
-
- this.conf = new Configuration();
- this.addPluginConfigDefaultProperties();
- this.loadFromXML(file);
- }
-
- /**
- * Create a new Hadoop location by copying an already existing one.
- *
- * @param existing the location to copy
- */
- public HadoopServer(HadoopServer existing) {
- this();
- this.load(existing);
- }
-
- public void addJobListener(IJobListener l) {
- jobListeners.add(l);
- }
-
- public void dispose() {
- // TODO close DFS connections?
- }
-
- /**
- * List all elements that should be present in the Server window (all
- * servers and all jobs running on each server)
- *
- * @return collection of jobs for this location
- */
- public Collection<HadoopJob> getJobs() {
- startStatusUpdater();
- return this.runningJobs.values();
- }
-
- /**
- * Remove the given job from the currently running jobs map
- *
- * @param job the job to remove
- */
- public void purgeJob(final HadoopJob job) {
- runningJobs.remove(job.getJobID());
- Display.getDefault().asyncExec(new Runnable() {
- public void run() {
- fireJobRemoved(job);
- }
- });
- }
-
- /**
- * Returns the {@link Configuration} defining this location.
- *
- * @return the location configuration
- */
- public Configuration getConfiguration() {
- return this.conf;
- }
-
- /**
- * Gets a Hadoop configuration property value
- *
- * @param prop the configuration property
- * @return the property value
- */
- public String getConfProp(ConfProp prop) {
- return prop.get(conf);
- }
-
- /**
- * Gets a Hadoop configuration property value
- *
- * @param propName the property name
- * @return the property value
- */
- public String getConfProp(String propName) {
- return this.conf.get(propName);
- }
-
- public String getLocationName() {
- return ConfProp.PI_LOCATION_NAME.get(conf);
- }
-
- /**
- * Returns the master host name of the Hadoop location (the Job tracker)
- *
- * @return the host name of the Job tracker
- */
- public String getMasterHostName() {
- return getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
- }
-
- public String getState() {
- return state;
- }
-
- /**
- * Overwrite this location with the given existing location
- *
- * @param existing the existing location
- */
- public void load(HadoopServer existing) {
- this.conf = new Configuration(existing.conf);
- }
-
- /**
- * Overwrite this location with settings available in the given XML file.
- * The existing configuration is preserved if the XML file is invalid.
- *
- * @param file the file path of the XML file
- * @return validity of the XML file
- * @throws ParserConfigurationException
- * @throws IOException
- * @throws SAXException
- */
- public boolean loadFromXML(File file) throws ParserConfigurationException,
- SAXException, IOException {
-
- Configuration newConf = new Configuration(this.conf);
-
- DocumentBuilder builder =
- DocumentBuilderFactory.newInstance().newDocumentBuilder();
- Document document = builder.parse(file);
-
- Element root = document.getDocumentElement();
- if (!"configuration".equals(root.getTagName()))
- return false;
- NodeList props = root.getChildNodes();
- for (int i = 0; i < props.getLength(); i++) {
- Node propNode = props.item(i);
- if (!(propNode instanceof Element))
- continue;
- Element prop = (Element) propNode;
- if (!"property".equals(prop.getTagName()))
- return false;
- NodeList fields = prop.getChildNodes();
- String attr = null;
- String value = null;
- for (int j = 0; j < fields.getLength(); j++) {
- Node fieldNode = fields.item(j);
- if (!(fieldNode instanceof Element))
- continue;
- Element field = (Element) fieldNode;
- if ("name".equals(field.getTagName()))
- attr = ((Text) field.getFirstChild()).getData();
- if ("value".equals(field.getTagName()) && field.hasChildNodes())
- value = ((Text) field.getFirstChild()).getData();
- }
- if (attr != null && value != null)
- newConf.set(attr, value);
- }
-
- this.conf = newConf;
- return true;
- }
-
- /**
- * Sets a Hadoop configuration property value
- *
- * @param prop the property
- * @param propValue the property value
- */
- public void setConfProp(ConfProp prop, String propValue) {
- prop.set(conf, propValue);
- }
-
- /**
- * Sets a Hadoop configuration property value
- *
- * @param propName the property name
- * @param propValue the property value
- */
- public void setConfProp(String propName, String propValue) {
- this.conf.set(propName, propValue);
- }
-
- public void setLocationName(String newName) {
- ConfProp.PI_LOCATION_NAME.set(conf, newName);
- }
-
- /**
- * Write this location's settings to the given file
- *
- * @param file the destination file
- * @throws IOException
- */
- public void storeSettingsToFile(File file) throws IOException {
- FileOutputStream fos = new FileOutputStream(file);
- try {
- this.conf.writeXml(fos);
- } finally {
- // release the stream even if writing fails
- fos.close();
- }
- }
-
- /* @inheritDoc */
- @Override
- public String toString() {
- return this.getLocationName();
- }
-
- /**
- * Fill the configuration with valid default values
- */
- private void addPluginConfigDefaultProperties() {
- for (ConfProp prop : ConfProp.values()) {
- if (conf.get(prop.name) == null)
- conf.set(prop.name, prop.defVal);
- }
- }
-
- /**
- * Starts the location status updater
- */
- private synchronized void startStatusUpdater() {
- if (statusUpdater == null) {
- statusUpdater = new LocationStatusUpdater();
- statusUpdater.schedule();
- }
- }
-
- /*
- * Rewrite of the connecting and tunneling to the Hadoop location
- */
-
- /**
- * Provides access to the default file system of this location.
- *
- * @return a {@link FileSystem}
- */
- public FileSystem getDFS() throws IOException {
- return FileSystem.get(this.conf);
- }
-
- /**
- * Provides access to the Job tracking system of this location
- *
- * @return a {@link JobClient}
- */
- public JobClient getJobClient() throws IOException {
- JobConf jconf = new JobConf(this.conf);
- return new JobClient(jconf);
- }
-
- /*
- * Listeners handling
- */
-
- protected void fireJarPublishDone(JarModule jar) {
- for (IJobListener listener : jobListeners) {
- listener.publishDone(jar);
- }
- }
-
- protected void fireJarPublishStart(JarModule jar) {
- for (IJobListener listener : jobListeners) {
- listener.publishStart(jar);
- }
- }
-
- protected void fireJobAdded(HadoopJob job) {
- for (IJobListener listener : jobListeners) {
- listener.jobAdded(job);
- }
- }
-
- protected void fireJobRemoved(HadoopJob job) {
- for (IJobListener listener : jobListeners) {
- listener.jobRemoved(job);
- }
- }
-
- protected void fireJobChanged(HadoopJob job) {
- for (IJobListener listener : jobListeners) {
- listener.jobChanged(job);
- }
- }
-
-}
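
A sketch (not from the original source) of defining a location programmatically and obtaining its DFS and JobClient handles; host names, ports, and the helper class are placeholders:

```java
// Sketch: building a HadoopServer location in code (same package assumed)
// and connecting to its file system and job tracker.
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobClient;

class LocationSetupSketch {
  static void connect() throws IOException {
    HadoopServer location = new HadoopServer(); // defaults filled in
    location.setLocationName("dev-cluster");
    location.setConfProp(ConfProp.PI_JOB_TRACKER_HOST, "jt.example.com");
    location.setConfProp(ConfProp.JOB_TRACKER_URI, "jt.example.com:50020");
    location.setConfProp(ConfProp.FS_DEFAULT_URI,
        "hdfs://nn.example.com:50040/");

    FileSystem fs = location.getDFS();          // default file system
    JobClient client = location.getJobClient(); // job-tracker handle
  }
}
```
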
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/IJobListener.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/IJobListener.java
deleted file mode 100644
index 1668e29622e..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/IJobListener.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-/**
- * Interface for updating/adding jobs to the MapReduce Server view.
- */
-public interface IJobListener {
-
- void jobChanged(HadoopJob job);
-
- void jobAdded(HadoopJob job);
-
- void jobRemoved(HadoopJob job);
-
- void publishStart(JarModule jar);
-
- void publishDone(JarModule jar);
-
-}
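
A hypothetical implementation, logging job life-cycle events to the console (the interface methods are real; this implementing class is invented and assumes same-package access):

```java
// Minimal IJobListener: print each event as the status updater fires it.
class ConsoleJobListener implements IJobListener {
  public void jobAdded(HadoopJob job) {
    System.out.println("added: " + job.getJobID());
  }
  public void jobChanged(HadoopJob job) {
    System.out.println("changed: " + job.getStatus());
  }
  public void jobRemoved(HadoopJob job) {
    System.out.println("removed: " + job.getJobID());
  }
  public void publishStart(JarModule jar) {
    System.out.println("publishing: " + jar.getName());
  }
  public void publishDone(JarModule jar) {
    System.out.println("published: " + jar.getName());
  }
}
// Registration: location.addJobListener(new ConsoleJobListener());
```
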
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/JarModule.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/JarModule.java
deleted file mode 100644
index 828e205cefe..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/JarModule.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.server;
-
-import java.io.File;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.ErrorMessageDialog;
-import org.eclipse.core.resources.IResource;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.Path;
-import org.eclipse.jdt.core.ICompilationUnit;
-import org.eclipse.jdt.core.IJavaElement;
-import org.eclipse.jdt.core.IType;
-import org.eclipse.jdt.ui.jarpackager.IJarExportRunnable;
-import org.eclipse.jdt.ui.jarpackager.JarPackageData;
-import org.eclipse.jface.operation.IRunnableWithProgress;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.ui.PlatformUI;
-
-/**
- * Methods for interacting with the jar file containing the
- * Mapper/Reducer/Driver classes for a MapReduce job.
- */
-
-public class JarModule implements IRunnableWithProgress {
-
- static Logger log = Logger.getLogger(JarModule.class.getName());
-
- private IResource resource;
-
- private File jarFile;
-
- public JarModule(IResource resource) {
- this.resource = resource;
- }
-
- public String getName() {
- return resource.getProject().getName() + "/" + resource.getName();
- }
-
- /**
- * Creates a JAR file containing the resource this module wraps (a Java
- * class with a main() method) and all associated resources; the result
- * is available afterwards through getJarFile()
- *
- * @param monitor the progress monitor
- */
- public void run(IProgressMonitor monitor) {
-
- log.fine("Build jar");
- JarPackageData jarrer = new JarPackageData();
-
- jarrer.setExportJavaFiles(true);
- jarrer.setExportClassFiles(true);
- jarrer.setExportOutputFolders(true);
- jarrer.setOverwrite(true);
-
- try {
- // IJavaProject project =
- // (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
-
- // check this is the case before letting this method get called
- Object element = resource.getAdapter(IJavaElement.class);
- IType type = ((ICompilationUnit) element).findPrimaryType();
- jarrer.setManifestMainClass(type);
-
- // Create a temporary JAR file name
- File baseDir = Activator.getDefault().getStateLocation().toFile();
-
- String prefix =
- String.format("%s_%s-", resource.getProject().getName(), resource
- .getName());
- File jarFile = File.createTempFile(prefix, ".jar", baseDir);
- jarrer.setJarLocation(new Path(jarFile.getAbsolutePath()));
-
- jarrer.setElements(resource.getProject().members(IResource.FILE));
- IJarExportRunnable runnable =
- jarrer.createJarExportRunnable(Display.getDefault()
- .getActiveShell());
- runnable.run(monitor);
-
- this.jarFile = jarFile;
-
- } catch (Exception e) {
- e.printStackTrace();
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Allow the retrieval of the resulting JAR file
- *
- * @return the generated JAR file
- */
- public File getJarFile() {
- return this.jarFile;
- }
-
- /**
- * Static helper that creates a JAR package for the given resource while
- * showing a progress bar
- *
- * @param resource the resource to package
- * @return the created JAR file, or null if the packaging failed
- */
- public static File createJarPackage(IResource resource) {
-
- JarModule jarModule = new JarModule(resource);
- try {
- PlatformUI.getWorkbench().getProgressService().run(false, true,
- jarModule);
-
- } catch (Exception e) {
- e.printStackTrace();
- return null;
- }
-
- File jarFile = jarModule.getJarFile();
- if (jarFile == null) {
- ErrorMessageDialog.display("Run on Hadoop",
- "Unable to create or locate the JAR file for the Job");
- return null;
- }
-
- return jarFile;
- }
-
-}
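
A usage sketch for the helper above: package the selected resource into a job JAR before submission. The wrapper class is hypothetical; `resource` must adapt to a compilation unit with a main type, as run() requires:

```java
// Sketch: building a job JAR from a workspace resource via JarModule
// (same package assumed).
import java.io.File;

import org.eclipse.core.resources.IResource;

class SubmitHelper {
  static File buildJobJar(IResource resource) {
    File jar = JarModule.createJarPackage(resource);
    if (jar == null) {
      // createJarPackage() has already shown an error dialog
      return null;
    }
    return jar;
  }
}
```
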
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopLocationWizard.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopLocationWizard.java
deleted file mode 100644
index 8fdd19b005b..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopLocationWizard.java
+++ /dev/null
@@ -1,972 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.servers;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.eclipse.server.ConfProp;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.eclipse.jface.dialogs.IMessageProvider;
-import org.eclipse.jface.wizard.WizardPage;
-import org.eclipse.swt.SWT;
-import org.eclipse.swt.custom.ScrolledComposite;
-import org.eclipse.swt.events.ModifyEvent;
-import org.eclipse.swt.events.ModifyListener;
-import org.eclipse.swt.events.SelectionEvent;
-import org.eclipse.swt.events.SelectionListener;
-import org.eclipse.swt.graphics.Image;
-import org.eclipse.swt.layout.GridData;
-import org.eclipse.swt.layout.GridLayout;
-import org.eclipse.swt.widgets.Button;
-import org.eclipse.swt.widgets.Composite;
-import org.eclipse.swt.widgets.Control;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.swt.widgets.Event;
-import org.eclipse.swt.widgets.Group;
-import org.eclipse.swt.widgets.Label;
-import org.eclipse.swt.widgets.Listener;
-import org.eclipse.swt.widgets.TabFolder;
-import org.eclipse.swt.widgets.TabItem;
-import org.eclipse.swt.widgets.Text;
-
-/**
- * Wizard for editing the settings of a Hadoop location
- *
- * The wizard contains 3 tabs: General, Tunneling and Advanced. It edits
- * parameters of the location member, which is either a new location or a
- * copy of an existing registered location.
- */
-
-public class HadoopLocationWizard extends WizardPage {
-
- Image circle;
-
- /**
- * The location effectively edited by the wizard. This location is a copy
- * or a new one.
- */
- private HadoopServer location;
-
- /**
- * The original location being edited by the wizard (null if we create a
- * new instance).
- */
- private HadoopServer original;
-
- /**
- * New Hadoop location wizard
- */
- public HadoopLocationWizard() {
- super("Hadoop Server", "New Hadoop Location", null);
-
- this.original = null;
- this.location = new HadoopServer();
- this.location.setLocationName("");
- }
-
- /**
- * Constructor to edit the parameters of an existing Hadoop server
- *
- * @param server the existing location whose parameters will be edited
- */
- public HadoopLocationWizard(HadoopServer server) {
- super("Create a new Hadoop location", "Edit Hadoop Location", null);
-
- this.original = server;
- this.location = new HadoopServer(server);
- }
-
- /**
- * Performs any actions appropriate in response to the user having pressed
- * the Finish button, or refuses if finishing now is not permitted.
- *
- * @return the created or updated Hadoop location, or null on failure
- */
- public HadoopServer performFinish() {
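- // Registry updates are routed through Display.syncExec() so that any
- // registered listeners may safely update SWT widgets.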
- try {
- if (this.original == null) {
- // New location
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- ServerRegistry.getInstance().addServer(
- HadoopLocationWizard.this.location);
- }
- });
- return this.location;
-
- } else {
- // Update location
- final String originalName = this.original.getLocationName();
- this.original.load(this.location);
-
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- ServerRegistry.getInstance().updateServer(originalName,
- HadoopLocationWizard.this.location);
- }
- });
- return this.original;
-
- }
- } catch (Exception e) {
- e.printStackTrace();
- setMessage("Invalid server location values", IMessageProvider.ERROR);
- return null;
- }
- }
-
- /**
- * Validates the current Hadoop location settings (looks for the Hadoop
- * installation directory).
- */
- private void testLocation() {
- setMessage("Not implemented yet", IMessageProvider.WARNING);
- }
-
- /**
- * The location is not complete (and the Finish button stays disabled)
- * until a host name is specified.
- *
- * @inheritDoc
- */
- @Override
- public boolean isPageComplete() {
-
- {
- String locName = location.getConfProp(ConfProp.PI_LOCATION_NAME);
- if ((locName == null) || (locName.length() == 0)
- || locName.contains("/")) {
-
- setMessage("Bad location name: "
- + "the location name should not contain "
- + "any character prohibited in a file name.", WARNING);
-
- return false;
- }
- }
-
- {
- String master = location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
- if ((master == null) || (master.length() == 0)) {
-
- setMessage("Bad master host name: "
- + "the master host name refers to the machine "
- + "that runs the Job tracker.", WARNING);
-
- return false;
- }
- }
-
- {
- String jobTracker = location.getConfProp(ConfProp.JOB_TRACKER_URI);
- String[] strs = jobTracker.split(":");
- boolean ok = (strs.length == 2);
- if (ok) {
- try {
- int port = Integer.parseInt(strs[1]);
- ok = (port >= 0) && (port < 65536);
- } catch (NumberFormatException nfe) {
- ok = false;
- }
- }
- if (!ok) {
- setMessage("The job tracker information ("
- + ConfProp.JOB_TRACKER_URI.name + ") is invalid. "
- + "This usually looks like \"host:port\"", WARNING);
- return false;
- }
- }
-
- {
- String fsDefaultURI = location.getConfProp(ConfProp.FS_DEFAULT_URI);
- try {
- new URI(fsDefaultURI);
- } catch (URISyntaxException e) {
-
- setMessage("The default file system URI is invalid. "
- + "This usually looks like \"hdfs://host:port/\" "
- + "or \"file:///dir/\"", WARNING);
- return false;
- }
- }
-
- setMessage("Define the location of a Hadoop infrastructure "
- + "for running MapReduce applications.");
- return true;
- }
-
- /**
- * Create the wizard
- */
- /* @inheritDoc */
- public void createControl(Composite parent) {
- setTitle("Define Hadoop location");
- setDescription("Define the location of a Hadoop infrastructure "
- + "for running MapReduce applications.");
-
- Composite panel = new Composite(parent, SWT.FILL);
- GridLayout glayout = new GridLayout(2, false);
- panel.setLayout(glayout);
-
- TabMediator mediator = new TabMediator(panel);
- {
- GridData gdata = new GridData(GridData.FILL_BOTH);
- gdata.horizontalSpan = 2;
- mediator.folder.setLayoutData(gdata);
- }
- this.setControl(panel /* mediator.folder */);
- {
- final Button btn = new Button(panel, SWT.NONE);
- btn.setText("&Load from file");
- btn.setEnabled(false);
- btn.setToolTipText("Not yet implemented");
- btn.addListener(SWT.Selection, new Listener() {
- public void handleEvent(Event e) {
- // TODO
- }
- });
- }
- {
- final Button validate = new Button(panel, SWT.NONE);
- validate.setText("&Validate location");
- validate.setEnabled(false);
- validate.setToolTipText("Not yet implemented");
- validate.addListener(SWT.Selection, new Listener() {
- public void handleEvent(Event e) {
- testLocation();
- }
- });
- }
- }
-
- private interface TabListener {
- void notifyChange(ConfProp prop, String propValue);
- }
-
- /*
- * Mediator pattern to keep tabs synchronized with each other and with the
- * location state.
- */
-
- private class TabMediator {
- TabFolder folder;
-
- private Set<TabListener> tabs = new HashSet<TabListener>();
-
- TabMediator(Composite parent) {
- folder = new TabFolder(parent, SWT.NONE);
- tabs.add(new TabMain(this));
- tabs.add(new TabAdvanced(this));
- }
-
- /**
- * Access to current configuration settings
- *
- * @param propName the property name
- * @return the current property value
- */
- String get(String propName) {
- return location.getConfProp(propName);
- }
-
- String get(ConfProp prop) {
- return location.getConfProp(prop);
- }
-
- /**
- * Implements change notifications from any tab: update the location
- * state and other tabs
- *
- * @param source origin of the notification (one of the tabs)
- * @param propName modified property
- * @param propValue new value
- */
- void notifyChange(TabListener source, final ConfProp prop,
- final String propValue) {
- // Ignore notification when no change
- String oldValue = location.getConfProp(prop);
- if ((oldValue != null) && oldValue.equals(propValue))
- return;
-
- location.setConfProp(prop, propValue);
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- getContainer().updateButtons();
- }
- });
-
- this.fireChange(source, prop, propValue);
-
- /*
- * Now we deal with dependencies between settings
- */
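- /*
- * Composite properties (JOB_TRACKER_URI, FS_DEFAULT_URI, SOCKS_SERVER)
- * are regenerated whenever one of their host/port components changes,
- * and an edited composite value is parsed back into its components.
- * The recursive notifyChange() calls terminate because updates that do
- * not change a value are ignored at the top of this method.
- */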
- final String jobTrackerHost =
- location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
- final String jobTrackerPort =
- location.getConfProp(ConfProp.PI_JOB_TRACKER_PORT);
- final String nameNodeHost =
- location.getConfProp(ConfProp.PI_NAME_NODE_HOST);
- final String nameNodePort =
- location.getConfProp(ConfProp.PI_NAME_NODE_PORT);
- final boolean colocate =
- location.getConfProp(ConfProp.PI_COLOCATE_MASTERS)
- .equalsIgnoreCase("yes");
- final String jobTrackerURI =
- location.getConfProp(ConfProp.JOB_TRACKER_URI);
- final String fsDefaultURI =
- location.getConfProp(ConfProp.FS_DEFAULT_URI);
- final String socksServerURI =
- location.getConfProp(ConfProp.SOCKS_SERVER);
- final boolean socksProxyEnable =
- location.getConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE)
- .equalsIgnoreCase("yes");
- final String socksProxyHost =
- location.getConfProp(ConfProp.PI_SOCKS_PROXY_HOST);
- final String socksProxyPort =
- location.getConfProp(ConfProp.PI_SOCKS_PROXY_PORT);
-
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- switch (prop) {
- case PI_JOB_TRACKER_HOST: {
- if (colocate)
- notifyChange(null, ConfProp.PI_NAME_NODE_HOST,
- jobTrackerHost);
- String newJobTrackerURI =
- String.format("%s:%s", jobTrackerHost, jobTrackerPort);
- notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
- break;
- }
- case PI_JOB_TRACKER_PORT: {
- String newJobTrackerURI =
- String.format("%s:%s", jobTrackerHost, jobTrackerPort);
- notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
- break;
- }
- case PI_NAME_NODE_HOST: {
- String newHDFSURI =
- String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
- notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
-
- // Break colocation if someone forces the DFS Master host
- if (!colocate && !nameNodeHost.equals(jobTrackerHost))
- notifyChange(null, ConfProp.PI_COLOCATE_MASTERS, "no");
- break;
- }
- case PI_NAME_NODE_PORT: {
- String newHDFSURI =
- String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
- notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
- break;
- }
- case PI_SOCKS_PROXY_HOST: {
- String newSocksProxyURI =
- String.format("%s:%s", socksProxyHost, socksProxyPort);
- notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
- break;
- }
- case PI_SOCKS_PROXY_PORT: {
- String newSocksProxyURI =
- String.format("%s:%s", socksProxyHost, socksProxyPort);
- notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
- break;
- }
- case JOB_TRACKER_URI: {
- String[] strs = jobTrackerURI.split(":", 2);
- String host = strs[0];
- String port = (strs.length == 2) ? strs[1] : "";
- notifyChange(null, ConfProp.PI_JOB_TRACKER_HOST, host);
- notifyChange(null, ConfProp.PI_JOB_TRACKER_PORT, port);
- break;
- }
- case FS_DEFAULT_URI: {
- try {
- URI uri = new URI(fsDefaultURI);
- if (uri.getScheme().equals("hdfs")) {
- String host = uri.getHost();
- String port = Integer.toString(uri.getPort());
- notifyChange(null, ConfProp.PI_NAME_NODE_HOST, host);
- notifyChange(null, ConfProp.PI_NAME_NODE_PORT, port);
- }
- } catch (URISyntaxException use) {
- // Ignore the update!
- }
- break;
- }
- case SOCKS_SERVER: {
- String[] strs = socksServerURI.split(":", 2);
- String host = strs[0];
- String port = (strs.length == 2) ? strs[1] : "";
- notifyChange(null, ConfProp.PI_SOCKS_PROXY_HOST, host);
- notifyChange(null, ConfProp.PI_SOCKS_PROXY_PORT, port);
- break;
- }
- case PI_COLOCATE_MASTERS: {
- if (colocate)
- notifyChange(null, ConfProp.PI_NAME_NODE_HOST,
- jobTrackerHost);
- break;
- }
- case PI_SOCKS_PROXY_ENABLE: {
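- // Toggling the proxy swaps Hadoop's default socket factory
- // between the SOCKS and the standard implementation.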
- if (socksProxyEnable) {
- notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT,
- "org.apache.hadoop.net.SocksSocketFactory");
- } else {
- notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT,
- "org.apache.hadoop.net.StandardSocketFactory");
- }
- break;
- }
- }
- }
- });
-
- }
-
- /**
- * Change notifications on properties (by name). A property might not be
- * reflected as a ConfProp enum. If it is, the notification is forwarded
- * to the ConfProp notifyChange method. If not, it is processed here.
- *
- * @param source origin of the notification
- * @param propName name of the modified property
- * @param propValue the new value
- */
- void notifyChange(TabListener source, String propName, String propValue) {
-
- ConfProp prop = ConfProp.getByName(propName);
- if (prop != null)
- notifyChange(source, prop, propValue);
-
- location.setConfProp(propName, propValue);
- }
-
- /**
- * Broadcast a property change to all registered tabs. If a tab is
- * identified as the source of the change, this tab will not be notified.
- *
- * @param source the tab at the origin of the change (not re-notified)
- * @param prop the modified property
- * @param value the new value
- */
- private void fireChange(TabListener source, ConfProp prop, String value) {
- for (TabListener tab : tabs) {
- if (tab != source)
- tab.notifyChange(prop, value);
- }
- }
-
- }
-
- /**
- * Create a SWT Text component for the given {@link ConfProp} text
- * configuration property.
- *
- * @param listener the listener to trigger on property change
- * @param parent the SWT parent container
- * @param prop the property to create the field for
- * @return the SWT Text field
- */
- private Text createConfText(ModifyListener listener, Composite parent,
- ConfProp prop) {
-
- Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
- GridData data = new GridData(GridData.FILL_HORIZONTAL);
- text.setLayoutData(data);
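- // Remember which property this widget edits; the modify listener
- // retrieves it later through getData("hProp").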
- text.setData("hProp", prop);
- text.setText(location.getConfProp(prop));
- text.addModifyListener(listener);
-
- return text;
- }
-
- /**
- * Create a SWT Checked Button component for the given {@link ConfProp}
- * boolean configuration property.
- *
- * @param listener the listener to trigger on selection change
- * @param parent the SWT parent container
- * @param prop the boolean property to create the button for
- * @param text the button label
- * @return the SWT check Button
- */
- private Button createConfCheckButton(SelectionListener listener,
- Composite parent, ConfProp prop, String text) {
-
- Button button = new Button(parent, SWT.CHECK);
- button.setText(text);
- button.setData("hProp", prop);
- button.setSelection(location.getConfProp(prop).equalsIgnoreCase("yes"));
- button.addSelectionListener(listener);
-
- return button;
- }
-
- /**
- * Create editor entry for the given configuration property. The editor is
- * a couple (Label, Text).
- *
- * @param listener the listener to trigger on property change
- * @param parent the SWT parent container
- * @param prop the property to create an editor for
- * @param labelText a label (null defaults to the property name)
- *
- * @return a SWT Text field
- */
- private Text createConfLabelText(ModifyListener listener,
- Composite parent, ConfProp prop, String labelText) {
-
- Label label = new Label(parent, SWT.NONE);
- if (labelText == null)
- labelText = prop.name;
- label.setText(labelText);
-
- return createConfText(listener, parent, prop);
- }
-
- /**
- * Create an editor entry for the given configuration name
- *
- * @param listener the listener to trigger on property change
- * @param parent the SWT parent container
- * @param propName the name of the property to create an editor for
- * @param labelText a label (null defaults to the property name)
- *
- * @return a SWT Text field
- */
- private Text createConfNameEditor(ModifyListener listener,
- Composite parent, String propName, String labelText) {
-
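- // If the name maps to a known ConfProp, delegate to the typed editor;
- // otherwise build a plain text editor keyed by the raw property name.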
- {
- ConfProp prop = ConfProp.getByName(propName);
- if (prop != null)
- return createConfLabelText(listener, parent, prop, labelText);
- }
-
- Label label = new Label(parent, SWT.NONE);
- if (labelText == null)
- labelText = propName;
- label.setText(labelText);
-
- Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
- GridData data = new GridData(GridData.FILL_HORIZONTAL);
- text.setLayoutData(data);
- text.setData("hPropName", propName);
- text.setText(location.getConfProp(propName));
- text.addModifyListener(listener);
-
- return text;
- }
-
- /**
- * Main parameters of the Hadoop location:
- * <li> host and port of the Map/Reduce master (the Job Tracker)
- * <li> host and port of the DFS master (the Name Node)
- * <li> the SOCKS proxy
- */
- private class TabMain implements TabListener, ModifyListener,
- SelectionListener {
-
- TabMediator mediator;
-
- Text locationName;
-
- Text textJTHost;
-
- Text textNNHost;
-
- Button colocateMasters;
-
- Text textJTPort;
-
- Text textNNPort;
-
- Text userName;
-
- Button useSocksProxy;
-
- Text socksProxyHost;
-
- Text socksProxyPort;
-
- TabMain(TabMediator mediator) {
- this.mediator = mediator;
- TabItem tab = new TabItem(mediator.folder, SWT.NONE);
- tab.setText("General");
- tab.setToolTipText("General location parameters");
- tab.setImage(circle);
- tab.setControl(createControl(mediator.folder));
- }
-
- private Control createControl(Composite parent) {
-
- Composite panel = new Composite(parent, SWT.FILL);
- panel.setLayout(new GridLayout(2, false));
-
- GridData data;
-
- /*
- * Location name
- */
- {
- Composite subpanel = new Composite(panel, SWT.FILL);
- subpanel.setLayout(new GridLayout(2, false));
- data = new GridData();
- data.horizontalSpan = 2;
- data.horizontalAlignment = SWT.FILL;
- subpanel.setLayoutData(data);
-
- locationName =
- createConfLabelText(this, subpanel, ConfProp.PI_LOCATION_NAME,
- "&Location name:");
- }
-
- /*
- * Map/Reduce group
- */
- {
- Group groupMR = new Group(panel, SWT.SHADOW_NONE);
- groupMR.setText("Map/Reduce Master");
- groupMR.setToolTipText("Address of the Map/Reduce master node "
- + "(the Job Tracker).");
- GridLayout layout = new GridLayout(2, false);
- groupMR.setLayout(layout);
- data = new GridData();
- data.verticalAlignment = SWT.FILL;
- data.horizontalAlignment = SWT.CENTER;
- data.widthHint = 250;
- groupMR.setLayoutData(data);
-
- // Job Tracker host
- Label label = new Label(groupMR, SWT.NONE);
- label.setText("Host:");
- data =
- new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
- label.setLayoutData(data);
-
- textJTHost =
- createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_HOST);
- data = new GridData(GridData.FILL, GridData.CENTER, true, true);
- textJTHost.setLayoutData(data);
-
- // Job Tracker port
- label = new Label(groupMR, SWT.NONE);
- label.setText("Port:");
- data =
- new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
- label.setLayoutData(data);
-
- textJTPort =
- createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_PORT);
- data = new GridData(GridData.FILL, GridData.CENTER, true, true);
- textJTPort.setLayoutData(data);
- }
-
- /*
- * DFS group
- */
- {
- Group groupDFS = new Group(panel, SWT.SHADOW_NONE);
- groupDFS.setText("DFS Master");
- groupDFS.setToolTipText("Address of the Distributed FileSystem "
- + "master node (the Name Node).");
- GridLayout layout = new GridLayout(2, false);
- groupDFS.setLayout(layout);
- data = new GridData();
- data.horizontalAlignment = SWT.CENTER;
- data.widthHint = 250;
- groupDFS.setLayoutData(data);
-
- colocateMasters =
- createConfCheckButton(this, groupDFS,
- ConfProp.PI_COLOCATE_MASTERS, "Use M/R Master host");
- data = new GridData();
- data.horizontalSpan = 2;
- colocateMasters.setLayoutData(data);
-
- // Name Node host
- Label label = new Label(groupDFS, SWT.NONE);
- data = new GridData();
- label.setText("Host:");
- label.setLayoutData(data);
-
- textNNHost =
- createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_HOST);
-
- // Name Node port
- label = new Label(groupDFS, SWT.NONE);
- data = new GridData();
- label.setText("Port:");
- label.setLayoutData(data);
-
- textNNPort =
- createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_PORT);
- }
-
- {
- Composite subpanel = new Composite(panel, SWT.FILL);
- subpanel.setLayout(new GridLayout(2, false));
- data = new GridData();
- data.horizontalSpan = 2;
- data.horizontalAlignment = SWT.FILL;
- subpanel.setLayoutData(data);
-
- userName =
- createConfLabelText(this, subpanel, ConfProp.PI_USER_NAME,
- "&User name:");
- }
-
- // SOCKS proxy group
- {
- Group groupSOCKS = new Group(panel, SWT.SHADOW_NONE);
- groupSOCKS.setText("SOCKS proxy");
- groupSOCKS.setToolTipText("Address of the SOCKS proxy to use "
- + "to connect to the infrastructure.");
- GridLayout layout = new GridLayout(2, false);
- groupSOCKS.setLayout(layout);
- data = new GridData();
- data.horizontalAlignment = SWT.CENTER;
- data.horizontalSpan = 2;
- data.widthHint = 250;
- groupSOCKS.setLayoutData(data);
-
- useSocksProxy =
- createConfCheckButton(this, groupSOCKS,
- ConfProp.PI_SOCKS_PROXY_ENABLE, "Enable SOCKS proxy");
- data = new GridData();
- data.horizontalSpan = 2;
- useSocksProxy.setLayoutData(data);
-
- // SOCKS proxy host
- Label label = new Label(groupSOCKS, SWT.NONE);
- data = new GridData();
- label.setText("Host:");
- label.setLayoutData(data);
-
- socksProxyHost =
- createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_HOST);
-
- // SOCKS proxy port
- label = new Label(groupSOCKS, SWT.NONE);
- data = new GridData();
- label.setText("Port:");
- label.setLayoutData(data);
-
- socksProxyPort =
- createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_PORT);
- }
-
- // Update the state of all widgets according to the current values!
- reloadConfProp(ConfProp.PI_COLOCATE_MASTERS);
- reloadConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE);
- reloadConfProp(ConfProp.PI_JOB_TRACKER_HOST);
-
- return panel;
- }
-
- /**
- * Reload the given configuration property value
- *
- * @param prop the property whose displayed value should be refreshed
- */
- private void reloadConfProp(ConfProp prop) {
- this.notifyChange(prop, location.getConfProp(prop));
- }
-
- public void notifyChange(ConfProp prop, String propValue) {
- switch (prop) {
- case PI_JOB_TRACKER_HOST: {
- textJTHost.setText(propValue);
- break;
- }
- case PI_JOB_TRACKER_PORT: {
- textJTPort.setText(propValue);
- break;
- }
- case PI_LOCATION_NAME: {
- locationName.setText(propValue);
- break;
- }
- case PI_USER_NAME: {
- userName.setText(propValue);
- break;
- }
- case PI_COLOCATE_MASTERS: {
- if (colocateMasters != null) {
- boolean colocate = propValue.equalsIgnoreCase("yes");
- colocateMasters.setSelection(colocate);
- if (textNNHost != null) {
- textNNHost.setEnabled(!colocate);
- }
- }
- break;
- }
- case PI_NAME_NODE_HOST: {
- textNNHost.setText(propValue);
- break;
- }
- case PI_NAME_NODE_PORT: {
- textNNPort.setText(propValue);
- break;
- }
- case PI_SOCKS_PROXY_ENABLE: {
- if (useSocksProxy != null) {
- boolean useProxy = propValue.equalsIgnoreCase("yes");
- useSocksProxy.setSelection(useProxy);
- if (socksProxyHost != null)
- socksProxyHost.setEnabled(useProxy);
- if (socksProxyPort != null)
- socksProxyPort.setEnabled(useProxy);
- }
- break;
- }
- case PI_SOCKS_PROXY_HOST: {
- socksProxyHost.setText(propValue);
- break;
- }
- case PI_SOCKS_PROXY_PORT: {
- socksProxyPort.setText(propValue);
- break;
- }
- }
- }
-
- /* @inheritDoc */
- public void modifyText(ModifyEvent e) {
- final Text text = (Text) e.widget;
- final ConfProp prop = (ConfProp) text.getData("hProp");
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- mediator.notifyChange(TabMain.this, prop, text.getText());
- }
- });
- }
-
- /* @inheritDoc */
- public void widgetDefaultSelected(SelectionEvent e) {
- this.widgetSelected(e);
- }
-
- /* @inheritDoc */
- public void widgetSelected(SelectionEvent e) {
- final Button button = (Button) e.widget;
- final ConfProp prop = (ConfProp) button.getData("hProp");
-
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- // We want to receive the update also!
- mediator.notifyChange(null, prop, button.getSelection() ? "yes"
- : "no");
- }
- });
- }
-
- }
-
- private class TabAdvanced implements TabListener, ModifyListener {
- TabMediator mediator;
-
- private Composite panel;
-
- private Map<String, Text> textMap = new TreeMap<String, Text>();
-
- TabAdvanced(TabMediator mediator) {
- this.mediator = mediator;
- TabItem tab = new TabItem(mediator.folder, SWT.NONE);
- tab.setText("Advanced parameters");
- tab.setToolTipText("Access to advanced Hadoop parameters");
- tab.setImage(circle);
- tab.setControl(createControl(mediator.folder));
-
- }
-
- private Control createControl(Composite parent) {
- ScrolledComposite sc =
- new ScrolledComposite(parent, SWT.BORDER | SWT.H_SCROLL
- | SWT.V_SCROLL);
-
- panel = new Composite(sc, SWT.NONE);
- sc.setContent(panel);
-
- sc.setExpandHorizontal(true);
- sc.setExpandVertical(true);
-
- sc.setMinSize(640, 480);
-
- GridLayout layout = new GridLayout();
- layout.numColumns = 2;
- layout.makeColumnsEqualWidth = false;
- panel.setLayout(layout);
- panel.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true,
- true, 1, 1));
-
- // Sort by property name
- Configuration config = location.getConfiguration();
- SortedMap<String, String> map = new TreeMap<String, String>();
- Iterator<Entry<String, String>> it = config.iterator();
- while (it.hasNext()) {
- Entry<String, String> entry = it.next();
- map.put(entry.getKey(), entry.getValue());
- }
-
- for (Entry<String, String> entry : map.entrySet()) {
- Text text = createConfNameEditor(this, panel, entry.getKey(), null);
- textMap.put(entry.getKey(), text);
- }
-
- sc.setMinSize(panel.computeSize(SWT.DEFAULT, SWT.DEFAULT));
-
- return sc;
- }
-
- public void notifyChange(ConfProp prop, final String propValue) {
- Text text = textMap.get(prop.name);
- text.setText(propValue);
- }
-
- public void modifyText(ModifyEvent e) {
- final Text text = (Text) e.widget;
- Object hProp = text.getData("hProp");
- final ConfProp prop = (hProp != null) ? (ConfProp) hProp : null;
- Object hPropName = text.getData("hPropName");
- final String propName =
- (hPropName != null) ? (String) hPropName : null;
-
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- if (prop != null)
- mediator.notifyChange(TabAdvanced.this, prop, text.getText());
- else
- mediator
- .notifyChange(TabAdvanced.this, propName, text.getText());
- }
- });
- }
- }
-
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopServerSelectionListContentProvider.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopServerSelectionListContentProvider.java
deleted file mode 100644
index 17f4acecf40..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopServerSelectionListContentProvider.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.servers;
-
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.eclipse.jface.viewers.IContentProvider;
-import org.eclipse.jface.viewers.ILabelProviderListener;
-import org.eclipse.jface.viewers.IStructuredContentProvider;
-import org.eclipse.jface.viewers.ITableLabelProvider;
-import org.eclipse.jface.viewers.Viewer;
-import org.eclipse.swt.graphics.Image;
-
-/**
- * Provider that enables selection of a predefined Hadoop server.
- */
-
-public class HadoopServerSelectionListContentProvider implements
- IContentProvider, ITableLabelProvider, IStructuredContentProvider {
- public void dispose() {
-
- }
-
- public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
-
- }
-
- public Image getColumnImage(Object element, int columnIndex) {
- return null;
- }
-
- public String getColumnText(Object element, int columnIndex) {
- if (element instanceof HadoopServer) {
- HadoopServer location = (HadoopServer) element;
- if (columnIndex == 0) {
- return location.getLocationName();
-
- } else if (columnIndex == 1) {
- return location.getMasterHostName();
- }
- }
-
- return element.toString();
- }
-
- public void addListener(ILabelProviderListener listener) {
-
- }
-
- public boolean isLabelProperty(Object element, String property) {
- return false;
- }
-
- public void removeListener(ILabelProviderListener listener) {
-
- }
-
- public Object[] getElements(Object inputElement) {
- return ServerRegistry.getInstance().getServers().toArray();
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/IHadoopServerListener.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/IHadoopServerListener.java
deleted file mode 100644
index 3c65173f9f3..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/IHadoopServerListener.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.servers;
-
-import org.apache.hadoop.eclipse.server.HadoopServer;
-
-/**
- * Interface for monitoring server changes
- */
-public interface IHadoopServerListener {
- void serverChanged(HadoopServer location, int type);
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java
deleted file mode 100644
index a08051088fa..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java
+++ /dev/null
@@ -1,355 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.servers;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.ErrorMessageDialog;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.server.JarModule;
-import org.apache.hadoop.mapred.JobConf;
-import org.eclipse.core.resources.IFile;
-import org.eclipse.core.runtime.CoreException;
-import org.eclipse.core.runtime.IPath;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.Path;
-import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
-import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
-import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
-import org.eclipse.jdt.launching.JavaRuntime;
-import org.eclipse.jface.viewers.TableViewer;
-import org.eclipse.jface.wizard.Wizard;
-import org.eclipse.jface.wizard.WizardPage;
-import org.eclipse.swt.SWT;
-import org.eclipse.swt.events.SelectionEvent;
-import org.eclipse.swt.events.SelectionListener;
-import org.eclipse.swt.layout.FillLayout;
-import org.eclipse.swt.layout.GridData;
-import org.eclipse.swt.layout.GridLayout;
-import org.eclipse.swt.widgets.Button;
-import org.eclipse.swt.widgets.Composite;
-import org.eclipse.swt.widgets.Label;
-import org.eclipse.swt.widgets.Table;
-import org.eclipse.swt.widgets.TableColumn;
-
-/**
- * Wizard for publishing a job to a Hadoop server.
- */
-
-public class RunOnHadoopWizard extends Wizard {
-
- private MainWizardPage mainPage;
-
- private HadoopLocationWizard createNewPage;
-
- /**
- * The file resource (containing a main()) to run on the Hadoop location
- */
- private IFile resource;
-
- /**
- * The launch configuration to update
- */
- private ILaunchConfigurationWorkingCopy iConf;
-
- private IProgressMonitor progressMonitor;
-
- public RunOnHadoopWizard(IFile resource,
- ILaunchConfigurationWorkingCopy iConf) {
- this.resource = resource;
- this.iConf = iConf;
- setForcePreviousAndNextButtons(true);
- setNeedsProgressMonitor(true);
- setWindowTitle("Run on Hadoop");
- }
-
- /**
- * This wizard contains 2 pages:
- * <li> the first one lets the user choose an already existing location
- * <li> the second one allows the user to create a new location, in case
- * it does not already exist
- */
- /* @inheritDoc */
- @Override
- public void addPages() {
- addPage(this.mainPage = new MainWizardPage());
- addPage(this.createNewPage = new HadoopLocationWizard());
- }
-
- /**
- * Performs any actions appropriate in response to the user having pressed
- * the Finish button, or refuses if finishing now is not permitted.
- */
- /* @inheritDoc */
- @Override
- public boolean performFinish() {
-
- /*
- * Create a new location or get an existing one
- */
- HadoopServer location = null;
- if (mainPage.createNew.getSelection()) {
- location = createNewPage.performFinish();
-
- } else if (mainPage.table.getSelection().length == 1) {
- location = (HadoopServer) mainPage.table.getSelection()[0].getData();
- }
-
- if (location == null)
- return false;
-
- /*
- * Get the base directory of the plug-in for storing configurations and
- * JARs
- */
- File baseDir = Activator.getDefault().getStateLocation().toFile();
-
- // Package the Job into a JAR
- File jarFile = JarModule.createJarPackage(resource);
- if (jarFile == null) {
- ErrorMessageDialog.display("Run on Hadoop",
- "Unable to create or locate the JAR file for the Job");
- return false;
- }
-
- /*
- * Generate a temporary Hadoop configuration directory and add it to the
- * classpath of the launch configuration
- */
-
- File confDir;
- try {
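- // createTempFile() reserves a unique name; the empty file is then
- // replaced by a directory of the same name.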
- confDir = File.createTempFile("hadoop-conf-", "", baseDir);
- confDir.delete();
- confDir.mkdirs();
- if (!confDir.isDirectory()) {
- ErrorMessageDialog.display("Run on Hadoop",
- "Cannot create temporary directory: " + confDir);
- return false;
- }
- } catch (IOException ioe) {
- ioe.printStackTrace();
- return false;
- }
-
- // Prepare the Hadoop configuration
- JobConf conf = new JobConf(location.getConfiguration());
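- // Record the path of the packaged job JAR so the submitted JobConf
- // carries the user code.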
- conf.setJar(jarFile.getAbsolutePath());
-
- // Write it to the disk file
- try {
- // File confFile = File.createTempFile("hadoop-site-", ".xml",
- // confDir);
- File confFile = new File(confDir, "hadoop-site.xml");
- FileOutputStream fos = new FileOutputStream(confFile);
- conf.writeXml(fos);
- fos.close();
-
- } catch (IOException ioe) {
- ioe.printStackTrace();
- return false;
- }
-
- // Setup the Launch class path
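- // The conf directory is prepended (index 0) so its hadoop-site.xml
- // shadows any other Hadoop configuration already on the classpath.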
- List<String> classPath;
- try {
- classPath =
- iConf.getAttribute(
- IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
- new ArrayList<String>());
- IPath confIPath = new Path(confDir.getAbsolutePath());
- IRuntimeClasspathEntry cpEntry =
- JavaRuntime.newArchiveRuntimeClasspathEntry(confIPath);
- classPath.add(0, cpEntry.getMemento());
- iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
- classPath);
-
- } catch (CoreException e) {
- e.printStackTrace();
- return false;
- }
-
- // location.runResource(resource, progressMonitor);
- return true;
- }
-
- private void refreshButtons() {
- getContainer().updateButtons();
- }
-
- /**
- * Allows finish when an existing server is selected or when a new server
- * location is defined
- */
- /* @inheritDoc */
- @Override
- public boolean canFinish() {
- if (mainPage != null)
- return mainPage.canFinish();
- return false;
- }
-
- /**
- * This is the main page of the wizard. It allows the user either to choose
- * an already existing location or to indicate that a new location should
- * be created.
- */
- public class MainWizardPage extends WizardPage {
-
- private Button createNew;
-
- private Table table;
-
- private Button chooseExisting;
-
- public MainWizardPage() {
- super("Select or define server to run on");
- setTitle("Select Hadoop location");
- setDescription("Select a Hadoop location to run on.");
- }
-
- /* @inheritDoc */
- @Override
- public boolean canFlipToNextPage() {
- return createNew.getSelection();
- }
-
- /* @inheritDoc */
- public void createControl(Composite parent) {
- Composite panel = new Composite(parent, SWT.NONE);
- panel.setLayout(new GridLayout(1, false));
-
- // Label
- Label label = new Label(panel, SWT.NONE);
- label.setText("Select a Hadoop Server to run on.");
- GridData gData = new GridData(GridData.FILL_BOTH);
- gData.grabExcessVerticalSpace = false;
- label.setLayoutData(gData);
-
- // Create location button
- createNew = new Button(panel, SWT.RADIO);
- createNew.setText("Define a new Hadoop server location");
- createNew.setLayoutData(gData);
- createNew.addSelectionListener(new SelectionListener() {
- public void widgetDefaultSelected(SelectionEvent e) {
- }
-
- public void widgetSelected(SelectionEvent e) {
- setPageComplete(true);
- RunOnHadoopWizard.this.refreshButtons();
- }
- });
- createNew.setSelection(true);
-
- // Select existing location button
- chooseExisting = new Button(panel, SWT.RADIO);
- chooseExisting
- .setText("Choose an existing server from the list below");
- chooseExisting.setLayoutData(gData);
- chooseExisting.addSelectionListener(new SelectionListener() {
- public void widgetDefaultSelected(SelectionEvent e) {
- }
-
- public void widgetSelected(SelectionEvent e) {
- if (chooseExisting.getSelection()
- && (table.getSelectionCount() == 0)) {
- if (table.getItems().length > 0) {
- table.setSelection(0);
- }
- }
- RunOnHadoopWizard.this.refreshButtons();
- }
- });
-
- // Table of existing locations
- Composite serverListPanel = new Composite(panel, SWT.FILL);
- gData = new GridData(GridData.FILL_BOTH);
- gData.horizontalSpan = 1;
- serverListPanel.setLayoutData(gData);
-
- FillLayout layout = new FillLayout();
- layout.marginHeight = layout.marginWidth = 12;
- serverListPanel.setLayout(layout);
-
- table =
- new Table(serverListPanel, SWT.BORDER | SWT.H_SCROLL
- | SWT.V_SCROLL | SWT.FULL_SELECTION);
- table.setHeaderVisible(true);
- table.setLinesVisible(true);
-
- TableColumn nameColumn = new TableColumn(table, SWT.LEFT);
- nameColumn.setText("Location");
- nameColumn.setWidth(450);
-
- TableColumn hostColumn = new TableColumn(table, SWT.LEFT);
- hostColumn.setText("Master host name");
- hostColumn.setWidth(250);
-
- // If the user select one entry, switch to "chooseExisting"
- table.addSelectionListener(new SelectionListener() {
- public void widgetDefaultSelected(SelectionEvent e) {
- }
-
- public void widgetSelected(SelectionEvent e) {
- chooseExisting.setSelection(true);
- createNew.setSelection(false);
- setPageComplete(table.getSelectionCount() == 1);
- RunOnHadoopWizard.this.refreshButtons();
- }
- });
-
- TableViewer viewer = new TableViewer(table);
- HadoopServerSelectionListContentProvider provider =
- new HadoopServerSelectionListContentProvider();
- viewer.setContentProvider(provider);
- viewer.setLabelProvider(provider);
- viewer.setInput(new Object());
- // don't care, get from singleton server registry
-
- this.setControl(panel);
- }
-
- /**
- * Returns whether this page state allows the Wizard to finish or not
- *
- * @return can the wizard finish or not?
- */
- public boolean canFinish() {
- if (!isControlCreated())
- return false;
-
- if (this.createNew.getSelection())
- return getNextPage().isPageComplete();
-
- return this.chooseExisting.getSelection();
- }
- }
-
- /**
- * @param progressMonitor the monitor used to report progress
- */
- public void setProgressMonitor(IProgressMonitor progressMonitor) {
- this.progressMonitor = progressMonitor;
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java
deleted file mode 100644
index 30a37cd439f..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.servers;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.eclipse.jface.dialogs.MessageDialog;
-
-/**
- * Register of Hadoop locations.
- *
- * Each location corresponds to a Hadoop {@link Configuration} stored as an
- * XML file in the workspace plug-in configuration directory:
- *
- * <workspace-dir>/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml
- *
- */
-public class ServerRegistry {
-
- private static final ServerRegistry INSTANCE = new ServerRegistry();
-
- public static final int SERVER_ADDED = 0;
-
- public static final int SERVER_REMOVED = 1;
-
- public static final int SERVER_STATE_CHANGED = 2;
-
- private final File baseDir =
- Activator.getDefault().getStateLocation().toFile();
-
- private final File saveDir = new File(baseDir, "locations");
-
- private ServerRegistry() {
- if (saveDir.exists() && !saveDir.isDirectory())
- saveDir.delete();
- if (!saveDir.exists())
- saveDir.mkdirs();
-
- load();
- }
-
- private Map<String, HadoopServer> servers;
-
- private Set<IHadoopServerListener> listeners =
- new HashSet<IHadoopServerListener>();
-
- public static ServerRegistry getInstance() {
- return INSTANCE;
- }
-
- public synchronized Collection<HadoopServer> getServers() {
- return Collections.unmodifiableCollection(servers.values());
- }
-
- /**
- * Load all available locations from the workspace configuration directory.
- */
- private synchronized void load() {
- Map<String, HadoopServer> map = new TreeMap<String, HadoopServer>();
- for (File file : saveDir.listFiles()) {
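- // A location that fails to load is skipped (and logged to stderr)
- // rather than aborting the whole registry.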
- try {
- HadoopServer server = new HadoopServer(file);
- map.put(server.getLocationName(), server);
-
- } catch (Exception exn) {
- System.err.println(exn);
- }
- }
- this.servers = map;
- }
-
- private synchronized void store() {
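- // Write-then-swap: all locations are saved into a fresh temporary
- // directory, the current directory is demoted to "locations.backup",
- // and the temporary directory is renamed into place, so a failure
- // part-way through still leaves a usable set of settings on disk.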
- try {
- File dir = File.createTempFile("locations", "new", baseDir);
- dir.delete();
- dir.mkdirs();
-
- for (HadoopServer server : servers.values()) {
- server.storeSettingsToFile(new File(dir, server.getLocationName()
- + ".xml"));
- }
-
- FilenameFilter XMLFilter = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- String lower = name.toLowerCase();
- return lower.endsWith(".xml");
- }
- };
-
- File backup = new File(baseDir, "locations.backup");
- if (backup.exists()) {
- for (File file : backup.listFiles(XMLFilter))
- if (!file.delete())
- throw new IOException("Unable to delete backup location file: "
- + file);
- if (!backup.delete())
- throw new IOException(
- "Unable to delete backup location directory: " + backup);
- }
-
- saveDir.renameTo(backup);
- dir.renameTo(saveDir);
-
- } catch (IOException ioe) {
- ioe.printStackTrace();
- MessageDialog.openError(null,
- "Saving configuration of Hadoop locations failed", ioe.toString());
- }
- }
-
- public void dispose() {
- for (HadoopServer server : getServers()) {
- server.dispose();
- }
- }
-
- public synchronized HadoopServer getServer(String location) {
- return servers.get(location);
- }
-
- /*
- * HadoopServer map listeners
- */
-
- public void addListener(IHadoopServerListener l) {
- synchronized (listeners) {
- listeners.add(l);
- }
- }
-
- public void removeListener(IHadoopServerListener l) {
- synchronized (listeners) {
- listeners.remove(l);
- }
- }
-
- private void fireListeners(HadoopServer location, int kind) {
- synchronized (listeners) {
- for (IHadoopServerListener listener : listeners) {
- listener.serverChanged(location, kind);
- }
- }
- }
-
- public synchronized void removeServer(HadoopServer server) {
- this.servers.remove(server.getLocationName());
- store();
- fireListeners(server, SERVER_REMOVED);
- }
-
- public synchronized void addServer(HadoopServer server) {
- this.servers.put(server.getLocationName(), server);
- store();
- fireListeners(server, SERVER_ADDED);
- }
-
- /**
- * Update one Hadoop location
- *
- * @param originalName the original location name (might have changed)
- * @param server the location
- */
- public synchronized void updateServer(String originalName,
- HadoopServer server) {
-
- // Update the map if the location name has changed
- if (!server.getLocationName().equals(originalName)) {
- servers.remove(originalName);
- servers.put(server.getLocationName(), server);
- }
- store();
- fireListeners(server, SERVER_STATE_CHANGED);
- }
-}
diff --git a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java b/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java
deleted file mode 100644
index 389d92e06ef..00000000000
--- a/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java
+++ /dev/null
@@ -1,460 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.eclipse.view.servers;
-
-import java.util.Collection;
-
-import org.apache.hadoop.eclipse.ImageLibrary;
-import org.apache.hadoop.eclipse.actions.EditLocationAction;
-import org.apache.hadoop.eclipse.actions.NewLocationAction;
-import org.apache.hadoop.eclipse.server.HadoopJob;
-import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.server.IJobListener;
-import org.apache.hadoop.eclipse.server.JarModule;
-import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
-import org.apache.hadoop.eclipse.servers.ServerRegistry;
-import org.eclipse.jface.action.Action;
-import org.eclipse.jface.action.IAction;
-import org.eclipse.jface.action.IMenuListener;
-import org.eclipse.jface.action.IMenuManager;
-import org.eclipse.jface.action.MenuManager;
-import org.eclipse.jface.dialogs.MessageDialog;
-import org.eclipse.jface.viewers.ILabelProviderListener;
-import org.eclipse.jface.viewers.ISelection;
-import org.eclipse.jface.viewers.ISelectionChangedListener;
-import org.eclipse.jface.viewers.IStructuredSelection;
-import org.eclipse.jface.viewers.ITableLabelProvider;
-import org.eclipse.jface.viewers.ITreeContentProvider;
-import org.eclipse.jface.viewers.ITreeSelection;
-import org.eclipse.jface.viewers.SelectionChangedEvent;
-import org.eclipse.jface.viewers.TreeViewer;
-import org.eclipse.jface.viewers.Viewer;
-import org.eclipse.swt.SWT;
-import org.eclipse.swt.graphics.Image;
-import org.eclipse.swt.layout.GridData;
-import org.eclipse.swt.widgets.Composite;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.swt.widgets.Menu;
-import org.eclipse.swt.widgets.Tree;
-import org.eclipse.swt.widgets.TreeColumn;
-import org.eclipse.ui.IViewSite;
-import org.eclipse.ui.PartInitException;
-import org.eclipse.ui.actions.ActionFactory;
-import org.eclipse.ui.part.ViewPart;
-
-/**
- * Map/Reduce locations view: displays all available Hadoop locations and the
- * Jobs running/finished on these locations
- */
-public class ServerView extends ViewPart implements ITreeContentProvider,
- ITableLabelProvider, IJobListener, IHadoopServerListener {
-
- /**
- * Deletion action: delete a Hadoop location, kill a running job or remove
- * a finished job entry
- */
- class DeleteAction extends Action {
-
- DeleteAction() {
- setText("Delete");
- setImageDescriptor(ImageLibrary.get("server.view.action.delete"));
- }
-
- /* @inheritDoc */
- @Override
- public void run() {
- ISelection selection =
- getViewSite().getSelectionProvider().getSelection();
- if ((selection != null) && (selection instanceof IStructuredSelection)) {
- Object selItem =
- ((IStructuredSelection) selection).getFirstElement();
-
- if (selItem instanceof HadoopServer) {
- HadoopServer location = (HadoopServer) selItem;
- if (MessageDialog.openConfirm(Display.getDefault()
- .getActiveShell(), "Confirm delete Hadoop location",
- "Do you really want to remove the Hadoop location: "
- + location.getLocationName())) {
- ServerRegistry.getInstance().removeServer(location);
- }
-
- } else if (selItem instanceof HadoopJob) {
-
- // kill the job
- HadoopJob job = (HadoopJob) selItem;
- if (job.isCompleted()) {
- // Job already finished, remove the entry
- job.getLocation().purgeJob(job);
-
- } else {
- // Job is running, kill the job?
- if (MessageDialog.openConfirm(Display.getDefault()
- .getActiveShell(), "Confirm kill running Job",
- "Do you really want to kill running Job: " + job.getJobID())) {
- job.kill();
- }
- }
- }
- }
- }
- }
-
- /**
- * This object is the root content for this content provider
- */
- private static final Object CONTENT_ROOT = new Object();
-
- private final IAction deleteAction = new DeleteAction();
-
- private final IAction editServerAction = new EditLocationAction(this);
-
- private final IAction newLocationAction = new NewLocationAction();
-
- private TreeViewer viewer;
-
- public ServerView() {
- }
-
- /* @inheritDoc */
- @Override
- public void init(IViewSite site) throws PartInitException {
- super.init(site);
- }
-
- /* @inheritDoc */
- @Override
- public void dispose() {
- ServerRegistry.getInstance().removeListener(this);
- }
-
- /**
- * Creates the columns for the view
- */
- @Override
- public void createPartControl(Composite parent) {
- Tree main =
- new Tree(parent, SWT.SINGLE | SWT.FULL_SELECTION | SWT.H_SCROLL
- | SWT.V_SCROLL);
- main.setHeaderVisible(true);
- main.setLinesVisible(false);
- main.setLayoutData(new GridData(GridData.FILL_BOTH));
-
- TreeColumn serverCol = new TreeColumn(main, SWT.SINGLE);
- serverCol.setText("Location");
- serverCol.setWidth(300);
- serverCol.setResizable(true);
-
- TreeColumn locationCol = new TreeColumn(main, SWT.SINGLE);
- locationCol.setText("Master node");
- locationCol.setWidth(185);
- locationCol.setResizable(true);
-
- TreeColumn stateCol = new TreeColumn(main, SWT.SINGLE);
- stateCol.setText("State");
- stateCol.setWidth(95);
- stateCol.setResizable(true);
-
- TreeColumn statusCol = new TreeColumn(main, SWT.SINGLE);
- statusCol.setText("Status");
- statusCol.setWidth(300);
- statusCol.setResizable(true);
-
- viewer = new TreeViewer(main);
- viewer.setContentProvider(this);
- viewer.setLabelProvider(this);
- viewer.setInput(CONTENT_ROOT); // root input also triggers listener registration in inputChanged()
-
- getViewSite().setSelectionProvider(viewer);
-
- getViewSite().getActionBars().setGlobalActionHandler(
- ActionFactory.DELETE.getId(), deleteAction);
- getViewSite().getActionBars().getToolBarManager().add(editServerAction);
- getViewSite().getActionBars().getToolBarManager().add(newLocationAction);
-
- createActions();
- createContextMenu();
- }
-
- /**
- * Actions
- */
- private void createActions() {
- /*
- * addItemAction = new Action("Add...") { public void run() { addItem(); } };
- * addItemAction.setImageDescriptor(ImageLibrary
- * .get("server.view.location.new"));
- */
- /*
- * deleteItemAction = new Action("Delete") { public void run() {
- * deleteItem(); } };
- * deleteItemAction.setImageDescriptor(getImageDescriptor("delete.gif"));
- *
- * selectAllAction = new Action("Select All") { public void run() {
- * selectAll(); } };
- */
- // Add selection listener.
- viewer.addSelectionChangedListener(new ISelectionChangedListener() {
- public void selectionChanged(SelectionChangedEvent event) {
- updateActionEnablement();
- }
- });
- }
-
- private void addItem() {
- System.out.printf("ADD ITEM\n");
- }
-
- private void updateActionEnablement() {
- IStructuredSelection sel = (IStructuredSelection) viewer.getSelection();
- // deleteItemAction.setEnabled(sel.size() > 0);
- }
-
- /**
- * Contextual menu
- */
- private void createContextMenu() {
- // Create menu manager.
- MenuManager menuMgr = new MenuManager();
- menuMgr.setRemoveAllWhenShown(true);
- menuMgr.addMenuListener(new IMenuListener() {
- public void menuAboutToShow(IMenuManager mgr) {
- fillContextMenu(mgr);
- }
- });
-
- // Create menu.
- Menu menu = menuMgr.createContextMenu(viewer.getControl());
- viewer.getControl().setMenu(menu);
-
- // Register menu for extension.
- getSite().registerContextMenu(menuMgr, viewer);
- }
-
- private void fillContextMenu(IMenuManager mgr) {
- mgr.add(newLocationAction);
- mgr.add(editServerAction);
- mgr.add(deleteAction);
- /*
- * mgr.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
- * mgr.add(deleteItemAction); mgr.add(new Separator());
- * mgr.add(selectAllAction);
- */
- }
-
- /* @inheritDoc */
- @Override
- public void setFocus() {
-
- }
-
- /*
- * IHadoopServerListener implementation
- */
-
- /* @inheritDoc */
- public void serverChanged(HadoopServer location, int type) {
- Display.getDefault().syncExec(new Runnable() {
- public void run() {
- ServerView.this.viewer.refresh();
- }
- });
- }
-
- /*
- * IStructuredContentProvider implementation
- */
-
- /* @inheritDoc */
- public void inputChanged(final Viewer viewer, Object oldInput,
- Object newInput) {
- if (oldInput == CONTENT_ROOT)
- ServerRegistry.getInstance().removeListener(this);
- if (newInput == CONTENT_ROOT)
- ServerRegistry.getInstance().addListener(this);
- }
-
- /**
- * The root elements displayed by this view are the existing Hadoop
- * locations
- */
- /* @inheritDoc */
- public Object[] getElements(Object inputElement) {
- return ServerRegistry.getInstance().getServers().toArray();
- }
-
- /*
- * ITreeStructuredContentProvider implementation
- */
-
- /**
- * Each location contains a child entry for each job it runs.
- */
- /* @inheritDoc */
- public Object[] getChildren(Object parent) {
-
- if (parent instanceof HadoopServer) {
- HadoopServer location = (HadoopServer) parent;
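- // (Re-)register this view as a job listener so job additions and
- // status changes refresh the tree; HadoopServer is assumed to
- // de-duplicate listeners.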
- location.addJobListener(this);
- Collection<HadoopJob> jobs = location.getJobs();
- return jobs.toArray();
- }
-
- return null;
- }
-
- /* @inheritDoc */
- public Object getParent(Object element) {
- if (element instanceof HadoopServer) {
- return CONTENT_ROOT;
-
- } else if (element instanceof HadoopJob) {
- return ((HadoopJob) element).getLocation();
- }
-
- return null;
- }
-
- /* @inheritDoc */
- public boolean hasChildren(Object element) {
- /* Only server entries have children */
- return (element instanceof HadoopServer);
- }
-
- /*
- * ITableLabelProvider implementation
- */
-
- /* @inheritDoc */
- public void addListener(ILabelProviderListener listener) {
- // no listeners handling
- }
-
- public boolean isLabelProperty(Object element, String property) {
- return false;
- }
-
- /* @inheritDoc */
- public void removeListener(ILabelProviderListener listener) {
- // no listener handling
- }
-
- /* @inheritDoc */
- public Image getColumnImage(Object element, int columnIndex) {
- if ((columnIndex == 0) && (element instanceof HadoopServer)) {
- return ImageLibrary.getImage("server.view.location.entry");
-
- } else if ((columnIndex == 0) && (element instanceof HadoopJob)) {
- return ImageLibrary.getImage("server.view.job.entry");
- }
- return null;
- }
-
- /* @inheritDoc */
- public String getColumnText(Object element, int columnIndex) {
- if (element instanceof HadoopServer) {
- HadoopServer server = (HadoopServer) element;
-
- switch (columnIndex) {
- case 0:
- return server.getLocationName();
- case 1:
- return server.getMasterHostName();
- case 2:
- return server.getState();
- case 3:
- return "";
- }
- } else if (element instanceof HadoopJob) {
- HadoopJob job = (HadoopJob) element;
-
- switch (columnIndex) {
- case 0:
- return job.getJobID().toString();
- case 1:
- return "";
- case 2:
- return job.getState().toString();
- case 3:
- return job.getStatus();
- }
- } else if (element instanceof JarModule) {
- JarModule jar = (JarModule) element;
-
- switch (columnIndex) {
- case 0:
- return jar.toString();
- case 1:
- return "Publishing jar to server..";
- case 2:
- return "";
- }
- }
-
- return null;
- }
-
- /*
- * IJobListener (Map/Reduce Jobs listener) implementation
- */
-
- /* @inheritDoc */
- public void jobAdded(HadoopJob job) {
- viewer.refresh();
- }
-
- /* @inheritDoc */
- public void jobRemoved(HadoopJob job) {
- viewer.refresh();
- }
-
- /* @inheritDoc */
- public void jobChanged(HadoopJob job) {
- viewer.refresh(job);
- }
-
- /* @inheritDoc */
- public void publishDone(JarModule jar) {
- viewer.refresh();
- }
-
- /* @inheritDoc */
- public void publishStart(JarModule jar) {
- viewer.refresh();
- }
-
- /*
- * Miscellaneous
- */
-
- /**
- * Return the currently selected server (null if there is no selection or
- * if the selection is not a server)
- *
- * @return the currently selected server entry
- */
- public HadoopServer getSelectedServer() {
- ITreeSelection selection = (ITreeSelection) viewer.getSelection();
- Object first = selection.getFirstElement();
- if (first instanceof HadoopServer) {
- return (HadoopServer) first;
- }
- return null;
- }
-
-}