commit aa471150ea
parent cf3d3a0643
Author: Giridharan Kesavan
Date:   2009-07-03 08:54:20 +00:00

Move eclipse-plugin from common to mapreduce

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@790831 13f79535-47bb-0310-9956-ffa450edef68

119 changed files with 0 additions and 9066 deletions

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="org/apache/hadoop/eclipse/server/CopyOfHadoopServer.java" kind="src" path="src/java"/>
<classpathentry exported="true" kind="lib" path="classes" sourcepath="classes"/>
<classpathentry exported="true" kind="lib" path="lib/commons-cli-2.0-SNAPSHOT.jar"/>
<classpathentry kind="lib" path="lib/hadoop-core.jar" sourcepath="/hadoop-socks/src/java"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="classes"/>
</classpath>

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>MapReduceTools</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

@@ -1,262 +0,0 @@
#Sat Oct 13 13:37:43 CEST 2007
eclipse.preferences.version=1
instance/org.eclipse.core.net/org.eclipse.core.net.hasMigrated=true
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_assignment=16
org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
org.eclipse.jdt.core.formatter.blank_lines_after_package=1
org.eclipse.jdt.core.formatter.blank_lines_before_field=1
org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
org.eclipse.jdt.core.formatter.blank_lines_before_method=1
org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
org.eclipse.jdt.core.formatter.blank_lines_before_package=0
org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
org.eclipse.jdt.core.formatter.comment.format_block_comments=true
org.eclipse.jdt.core.formatter.comment.format_header=false
org.eclipse.jdt.core.formatter.comment.format_html=true
org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
org.eclipse.jdt.core.formatter.comment.format_line_comments=true
org.eclipse.jdt.core.formatter.comment.format_source_code=true
org.eclipse.jdt.core.formatter.comment.indent_parameter_description=false
org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=do not insert
org.eclipse.jdt.core.formatter.comment.line_length=77
org.eclipse.jdt.core.formatter.compact_else_if=true
org.eclipse.jdt.core.formatter.continuation_indentation=2
org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_empty_lines=false
org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
org.eclipse.jdt.core.formatter.indentation.size=4
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=insert
org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
org.eclipse.jdt.core.formatter.lineSplit=77
org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
org.eclipse.jdt.core.formatter.tabulation.char=space
org.eclipse.jdt.core.formatter.tabulation.size=2
org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true

@@ -1,6 +0,0 @@
#Tue Aug 14 19:41:15 PDT 2007
eclipse.preferences.version=1
formatter_profile=_Lucene
formatter_settings_version=11
instance/org.eclipse.core.net/org.eclipse.core.net.hasMigrated=true
org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>

@@ -1,6 +0,0 @@
#Tue Aug 14 19:41:15 PDT 2007
DELEGATES_PREFERENCE=delegateValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator\=org.eclipse.wst.xsd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator\=org.eclipse.wst.wsdl.validation.internal.eclipse.Validator;
USER_BUILD_PREFERENCE=enabledBuildValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator;org.eclipse.jst.jsp.core.internal.validation.JSPContentValidator;org.eclipse.wst.html.internal.validation.HTMLValidator;org.eclipse.wst.xml.core.internal.validation.eclipse.Validator;org.eclipse.jst.jsf.validation.internal.appconfig.AppConfigValidator;org.eclipse.jst.jsp.core.internal.validation.JSPBatchValidator;org.eclipse.wst.dtd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsi.ui.internal.WSIMessageValidator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator;org.eclipse.jst.jsf.validation.internal.JSPSemanticsValidator;
USER_MANUAL_PREFERENCE=enabledManualValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator;org.eclipse.jst.jsp.core.internal.validation.JSPContentValidator;org.eclipse.wst.html.internal.validation.HTMLValidator;org.eclipse.wst.xml.core.internal.validation.eclipse.Validator;org.eclipse.jst.jsf.validation.internal.appconfig.AppConfigValidator;org.eclipse.jst.jsp.core.internal.validation.JSPBatchValidator;org.eclipse.wst.dtd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsi.ui.internal.WSIMessageValidator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator;org.eclipse.jst.jsf.validation.internal.JSPSemanticsValidator;
USER_PREFERENCE=overrideGlobalPreferencesfalse
eclipse.preferences.version=1

@@ -1,29 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: MapReduce Tools for Eclipse
Bundle-SymbolicName: org.apache.hadoop.eclipse;singleton:=true
Bundle-Version: 0.18
Bundle-Activator: org.apache.hadoop.eclipse.Activator
Bundle-Localization: plugin
Require-Bundle: org.eclipse.ui,
org.eclipse.core.runtime,
org.eclipse.jdt.launching,
org.eclipse.debug.core,
org.eclipse.jdt,
org.eclipse.jdt.core,
org.eclipse.core.resources,
org.eclipse.ui.ide,
org.eclipse.jdt.ui,
org.eclipse.debug.ui,
org.eclipse.jdt.debug.ui,
org.eclipse.core.expressions,
org.eclipse.ui.cheatsheets,
org.eclipse.ui.console,
org.eclipse.ui.navigator,
org.eclipse.core.filesystem,
org.apache.commons.logging
Eclipse-LazyStart: true
Bundle-ClassPath: classes/,
lib/commons-cli-2.0-SNAPSHOT.jar,
lib/hadoop-core.jar
Bundle-Vendor: Apache Hadoop

@@ -1,7 +0,0 @@
output.. = bin/
bin.includes = META-INF/,\
plugin.xml,\
resources/,\
classes/,\
classes/,\
lib/

@@ -1,80 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project default="jar" name="eclipse-plugin">
<import file="../build-contrib.xml"/>
<path id="eclipse-sdk-jars">
<fileset dir="${eclipse.home}/plugins/">
<include name="org.eclipse.ui*.jar"/>
<include name="org.eclipse.jdt*.jar"/>
<include name="org.eclipse.core*.jar"/>
<include name="org.eclipse.equinox*.jar"/>
<include name="org.eclipse.debug*.jar"/>
<include name="org.eclipse.osgi*.jar"/>
<include name="org.eclipse.swt*.jar"/>
<include name="org.eclipse.jface*.jar"/>
<include name="org.eclipse.team.cvs.ssh2*.jar"/>
<include name="com.jcraft.jsch*.jar"/>
</fileset>
</path>
<!-- Override classpath to include Eclipse SDK jars -->
<path id="classpath">
<pathelement location="${build.classes}"/>
<pathelement location="${hadoop.root}/build/classes"/>
<path refid="eclipse-sdk-jars"/>
</path>
<!-- Skip building if eclipse.home is unset. -->
<target name="check-contrib" unless="eclipse.home">
<property name="skip.contrib" value="yes"/>
<echo message="eclipse.home unset: skipping eclipse plugin"/>
</target>
<target name="compile" depends="init, ivy-retrieve-common" unless="skip.contrib">
<echo message="contrib: ${name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
includes="**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
deprecation="${javac.deprecation}">
<classpath refid="classpath"/>
</javac>
</target>
<!-- Override jar target to specify manifest -->
<target name="jar" depends="compile" unless="skip.contrib">
<mkdir dir="${build.dir}/lib"/>
<copy file="${hadoop.root}/build/hadoop-${version}-core.jar" tofile="${build.dir}/lib/hadoop-core.jar" verbose="true"/>
<copy file="${hadoop.root}/build/ivy/lib/Hadoop/common/commons-cli-${commons-cli.version}.jar" todir="${build.dir}/lib" verbose="true"/>
<copy file="${hadoop.root}/build/ivy/lib/Hadoop/common/commons-cli-${commons-cli2.version}.jar" todir="${build.dir}/lib" verbose="true"/>
<jar
jarfile="${build.dir}/hadoop-${version}-${name}.jar"
manifest="${root}/META-INF/MANIFEST.MF">
<fileset dir="${build.dir}" includes="classes/ lib/"/>
<fileset dir="${root}" includes="resources/ plugin.xml"/>
</jar>
</target>
</project>

@@ -1,36 +0,0 @@
<?xml version="1.0" ?>
<ivy-module version="1.0">
<info organisation="org.apache.hadoop" module="${ant.project.name}">
<license name="Apache 2.0"/>
<ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
<description>
Apache Hadoop
</description>
</info>
<configurations defaultconfmapping="default">
<!--these match the Maven configurations-->
<conf name="default" extends="master,runtime"/>
<conf name="master" description="contains the artifact but no dependencies"/>
<conf name="runtime" description="runtime but not the artifact" />
<conf name="common" visibility="private"
extends="runtime"
description="artifacts needed for compile/test the application"/>
<conf name="test" visibility="private" extends="runtime"/>
</configurations>
<publications>
<!--get the artifact from our module name-->
<artifact conf="master"/>
</publications>
<dependencies>
<dependency org="commons-logging"
name="commons-logging"
rev="${commons-logging.version}"
conf="common->default"/>
<dependency org="log4j"
name="log4j"
rev="${log4j.version}"
conf="common->master"/>
</dependencies>
</ivy-module>

@@ -1,5 +0,0 @@
#This properties file lists the versions of the various artifacts used by streaming.
#It drives ivy and the generation of a maven POM
#Please list the dependencies name with version if they are different from the ones
#listed in the global libraries.properties file (in alphabetical order)

@@ -1,287 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<?eclipse version="3.2"?>
<plugin>
<!-- extension
point="org.eclipse.debug.core.launchConfigurationTypes">
<launchConfigurationType
delegate="org.apache.hadoop.eclipse.launch.HadoopLaunchDelegate"
id="org.apache.hadoop.eclipse.launch.StartServer"
modes="run,debug"
name="Start Hadoop Server"
public="true"/>
</extension -->
<extension
name="MapReduce Nature"
id="org.apache.hadoop.eclipse.Nature"
point="org.eclipse.core.resources.natures">
<runtime>
<run class="org.apache.hadoop.eclipse.MapReduceNature"/>
</runtime>
</extension>
<extension
point="org.eclipse.ui.ide.projectNatureImages">
</extension>
<!-- Wizards: new Mapper, Reducer, Driver -->
<extension
point="org.eclipse.ui.newWizards">
<primaryWizard id="org.apache.hadoop.eclipse.NewProjectWizard"/>
<wizard
category="org.apache.hadoop.eclipse.category"
class="org.apache.hadoop.eclipse.NewMapReduceProjectWizard"
finalPerspective="org.apache.hadoop.eclipse.Perspective"
hasPages="true"
icon="resources/Elephant16x16.gif"
id="org.apache.hadoop.eclipse.NewProjectWizard"
name="Map/Reduce Project"
preferredPerspectives="org.apache.hadoop.eclipse.Perspective"
project="true"/>
<wizard
category="org.apache.hadoop.eclipse.category"
class="org.apache.hadoop.eclipse.NewMapperWizard"
icon="resources/mapper16.png"
id="org.apache.hadoop.eclipse.NewMapperWizard"
name="Mapper"
project="false"/>
<wizard
category="org.apache.hadoop.eclipse.category"
class="org.apache.hadoop.eclipse.NewReducerWizard"
icon="resources/reducer16.png"
id="org.apache.hadoop.eclipse.NewReducerWizard"
name="Reducer"
project="false"/>
<wizard
category="org.apache.hadoop.eclipse.category"
class="org.apache.hadoop.eclipse.NewDriverWizard"
icon="resources/driver.png"
id="org.apache.hadoop.eclipse.NewDriverWizard"
name="MapReduce Driver"
project="false"/>
<category
id="org.apache.hadoop.eclipse.category"
name="Map/Reduce"/>
</extension>
<extension
point="org.eclipse.debug.ui.launchConfigurationTypeImages">
<launchConfigurationTypeImage
configTypeID="org.apache.hadoop.eclipse.launch.Local"
icon="resources/elephantblue16x16.gif"
id="Hadouken.launchConfigurationTypeImage1"/>
</extension>
<extension
point="org.eclipse.debug.ui.launchConfigurationTabGroups">
<launchConfigurationTabGroup
class="org.apache.hadoop.eclipse.launch.StartHadoopLaunchTabGroup"
id="org.apache.hadoop.eclipse.launch.StartHadoopLaunchTabGroup"
type="org.apache.hadoop.eclipse.launch.StartServer"/>
</extension>
<!-- Perspective: Map/Reduce -->
<extension
point="org.eclipse.ui.perspectives">
<perspective
class="org.apache.hadoop.eclipse.HadoopPerspectiveFactory"
icon="resources/elephantblue16x16.gif"
id="org.apache.hadoop.eclipse.Perspective"
name="Map/Reduce"/>
</extension>
<!-- Needed: allows DFS Browsing in Navigator! [TODO] -->
<extension
point="org.eclipse.core.expressions.propertyTesters">
<propertyTester
class="org.apache.hadoop.eclipse.PropertyTester"
id="mapreduce.deployable"
namespace="mapreduce"
properties="deployable"
type="org.eclipse.core.resources.IResource"/>
<propertyTester
class="org.apache.hadoop.eclipse.PropertyTester"
id="mapreduce.server"
namespace="mapreduce"
properties="server"
type="org.eclipse.wst.server.core.IServer"/>
</extension>
<!-- Run on Hadoop action -->
<extension
point="org.eclipse.debug.ui.launchShortcuts">
<shortcut
class="org.apache.hadoop.eclipse.launch.HadoopApplicationLaunchShortcut"
icon="resources/elephantblue16x16.gif"
id="org.apache.hadoop.eclipse.launch.shortcut"
label="Run on Hadoop"
modes="run">
<contextualLaunch>
<enablement>
<with variable="selection">
<count value="1"/>
<iterate>
<or>
<test property="org.eclipse.jdt.launching.hasMain"/>
<and>
<test property="org.eclipse.jdt.launching.isContainer"/>
<test property="org.eclipse.jdt.launching.hasProjectNature" args="org.eclipse.jdt.core.javanature"/>
<test property="org.eclipse.jdt.launching.hasProjectNature" args="org.apache.hadoop.eclipse.Nature"/>
</and>
</or>
</iterate>
</with>
</enablement>
</contextualLaunch>
<perspective id="org.apache.hadoop.eclipse.Perspective"/>
</shortcut>
</extension>
<!-- Hadoop locations view -->
<extension
point="org.eclipse.ui.views">
<category
id="org.apache.hadoop.eclipse.view.servers"
name="MapReduce Tools"/>
<view
allowMultiple="false"
category="org.apache.hadoop.eclipse.view.servers"
class="org.apache.hadoop.eclipse.view.servers.ServerView"
icon="resources/hadoop-logo-16x16.png"
id="org.apache.hadoop.eclipse.view.servers"
name="Map/Reduce Locations">
</view>
</extension>
<!-- ??? -->
<!-- extension
point="org.eclipse.ui.cheatsheets.cheatSheetContent">
<category
id="org.apache.hadoop.eclipse.cheatsheet.Examples"
name="MapReduce"/>
<cheatsheet
category="org.apache.hadoop.eclipse.cheatsheet.Examples"
composite="true"
contentFile="resources/HelloWorld.xml"
id="org.apache.hadoop.eclipse.cheatsheet"
name="Write a MapReduce application"/>
</extension -->
<!-- DFS Browser -->
<extension
point="org.eclipse.ui.navigator.navigatorContent">
<navigatorContent
activeByDefault="true"
contentProvider="org.apache.hadoop.eclipse.dfs.DFSContentProvider"
icon="resources/elephantblue16x16.gif"
id="org.apache.hadoop.eclipse.views.dfscontent"
labelProvider="org.apache.hadoop.eclipse.dfs.DFSContentProvider"
name="Hadoop Distributed File Systems"
priority="normal"
providesSaveables="false">
<triggerPoints>
<or>
<instanceof value="org.apache.hadoop.eclipse.dfs.DFSPath"/>
<adapt type="org.eclipse.core.resources.IResource">
<test
forcePluginActivation="true"
property="mapreduce.deployable"/>
</adapt>
</or>
</triggerPoints>
<actionProvider class="org.apache.hadoop.eclipse.dfs.ActionProvider">
</actionProvider>
<possibleChildren>
<or>
<instanceof value="org.eclipse.wst.server.core.IServer"/>
<instanceof value="org.apache.hadoop.eclipse.dfs.DFSLocationsRoot"/>
<instanceof value="org.apache.hadoop.eclipse.dfs.DFSLocation"/>
<instanceof value="org.apache.hadoop.eclipse.dfs.DFSPath"/>
</or>
</possibleChildren>
</navigatorContent>
</extension>
<!-- DFS Actions -->
<extension
point="org.eclipse.ui.navigator.viewer">
<viewer
viewerId="org.apache.hadoop.eclipse.dfs.DFSViewer">
<popupMenu
allowsPlatformContributions="true"
id="org.apache.hadoop.eclipse.dfs.DFSViewer#PopupMenu">
<insertionPoint name="group.new"/>
<insertionPoint
name="group.open"
separator="true"/>
<insertionPoint name="group.openWith"/>
<insertionPoint name="group.edit"
separator="true"/>
<insertionPoint name="group.reorganize" />
<insertionPoint
name="group.port"
separator="true"/>
<insertionPoint
name="group.build"
separator="true"/>
<insertionPoint
name="group.generate"
separator="true"/>
<insertionPoint
name="group.search"
separator="true"/>
<insertionPoint
name="additions"
separator="true"/>
<insertionPoint
name="group.properties"
separator="true"/>
</popupMenu>
</viewer>
<viewerContentBinding viewerId="org.eclipse.ui.navigator.ProjectExplorer">
<includes>
<contentExtension
isRoot="false"
pattern="org.apache.hadoop.eclipse.views.dfscontent"/>
<actionExtension pattern="org.apache.hadoop.eclipse.views.dfscontent.*"/>
</includes>
</viewerContentBinding>
</extension>
<!-- HDFS FileSystem registration [TODO] -->
<!-- extension
point="org.eclipse.core.filesystem.filesystems">
<filesystem scheme="hdfs">
<run class="org.apache.hadoop.eclipse.dfs.FileSystem"/>
</filesystem>
</extension -->
<!--
<extension
point="org.eclipse.ui.popupMenus">
<viewerContribution
id="Hadouken.viewerContribution1"
targetID="org.eclipse.ui.navigator.ProjectExplorer#PopupMenu"/>
</extension> -->
<!-- Preferences -->
<extension
point="org.eclipse.ui.preferencePages">
<page
class="org.apache.hadoop.eclipse.preferences.MapReducePreferencePage"
id="org.apache.hadoop.eclipse.preferences.MapReducePreferencePage"
name="Hadoop Map/Reduce"/>
</extension>
<extension
point="org.eclipse.core.runtime.preferences">
<initializer class="org.apache.hadoop.eclipse.preferences.PreferenceInitializer"/>
</extension>
</plugin>

Binary file removed (image, 1006 B)
Binary file removed (image, 2.4 KiB)
Binary file removed (image, 2.2 KiB)
Binary file removed (image, 359 B)
Binary file removed (image, 1.4 KiB)
Binary file removed (image, 853 B)
Binary file removed (image, 1.2 KiB)

@@ -1,32 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<cheatsheet title="Set default Hadoop path tutorial">
<intro>
<description>
This tutorial informs you how to set the default Hadoop
directory for the plugin.
</description>
</intro>
<item title="Create MapReduce Cluster" skip="true">
<description>
Define a MapReduce cluster [if you have not done so already]
by opening the MapReduce Servers view and clicking on the
blue elephant in the upper right.
Use the following embedded command to create a new Hadoop Server:
</description>
<action pluginId="com.ibm.hipods.mapreduce"
class="org.apache.hadoop.eclipse.actions.NewServerAction" />
</item>
<item title="Open and Explore DFS Tree">
<description>
Project Explorer view shows an elephant icon for each defined
server. Opening a server entry will open a connection to
the root of that server's DFS tree. You can then explore the
DFS tree.
</description>
</item>
</cheatsheet>

@@ -1,62 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<cheatsheet title="MapReduce project creation tutorial">
<intro>
<description>
This tutorial guides you through the creation of a simple
MapReduce project with three MapReduce classes: a Mapper, a
Reducer, and a Driver.
</description>
</intro>
<item title="Open the MapReduce Perspective">
<action pluginId="org.eclipse.ui.cheatsheets"
class="org.eclipse.ui.internal.cheatsheets.actions.OpenPerspective"
param1="org.apache.hadoop.eclipse.Perspective" />
<description>
Select <b>Window->Open Perspective->MapReduce</b> in the menubar at
the top of the workbench. This step changes the perspective
to set up the Eclipse workbench for MapReduce development.
</description>
</item>
<item title="Create a MapReduce project" skip="true">
<action pluginId="com.ibm.hipods.mapreduce"
class="org.apache.hadoop.eclipse.actions.OpenNewMRProjectAction" />
<description>
The first thing you will need is a MapReduce Project. If you
already have a MapReduce project in your workspace that you
would like to use, you may skip this step by clicking the
"Click to Skip" button. If not, select <b>File->New->Project</b>
and choose MapReduce Project in the list. Complete the
subsequent pages as required.
</description>
</item>
<item title="Create a MapReduce package" skip="true">
<action pluginId="org.eclipse.jdt.ui"
class="org.eclipse.jdt.ui.actions.OpenNewPackageWizardAction" />
<description>
You should now have a MapReduce project in your workspace.
The next thing to do is to create a package. Use the Eclipse
tools by selecting the <b>File -> New -> Package</b> action. Specify the
source folder (the project containing the package). Then,
give the package a name, such as "mapreduce.test", and click
the "Finish" button. If you already have a project with a
package you might as well skip this step.
</description>
</item>
<item title="Create the MapReduce application classes" skip="true">
<description>
Now you should be set up for creating your MapReduce
application. The MapReduce application consists of three
classes: a Mapper class, a Reducer class and a Driver class.
In this step you will create the three classes. Use the
class wizard by selecting <b>File -> New -> Class</b>.
Repeat this for every class.
</description>
<repeated-subitem values="Mapper,Reducer,Driver">
<subitem label="Create the class ${this}.">
<action pluginId="com.ibm.hipods.mapreduce"
class="org.apache.hadoop.eclipse.actions.OpenNewMRClassWizardAction"
param1="${this}" />
</subitem>
</repeated-subitem>
</item>
</cheatsheet>
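For context: the three classes this cheat sheet walks through would, under the classic org.apache.hadoop.mapred API the plugin targeted in the Hadoop 0.18 era, look roughly like the word-count sketch below. This is an illustrative example only, not one of the files deleted by this commit, and the names (WordCount, Map, Reduce) are hypothetical.

/**
 * Illustrative sketch only -- not part of this commit. Roughly what the
 * Mapper/Reducer/Driver wizards described above would produce, assuming
 * the old org.apache.hadoop.mapred API (Hadoop 0.18 timeframe).
 */
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;

public class WordCount {

  // Mapper: emits (token, 1) for each whitespace-separated token.
  public static class Map extends MapReduceBase
      implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    public void map(LongWritable key, Text value,
        OutputCollector<Text, IntWritable> out, Reporter reporter)
        throws IOException {
      StringTokenizer itr = new StringTokenizer(value.toString());
      while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        out.collect(word, ONE);
      }
    }
  }

  // Reducer: sums the counts collected for each token.
  public static class Reduce extends MapReduceBase
      implements Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values,
        OutputCollector<Text, IntWritable> out, Reporter reporter)
        throws IOException {
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      out.collect(key, new IntWritable(sum));
    }
  }

  // Driver: wires the job together and submits it.
  public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf(WordCount.class);
    conf.setJobName("wordcount");
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(Map.class);
    conf.setReducerClass(Reduce.class);
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));
    JobClient.runJob(conf);
  }
}

A Driver main() of this kind is also what the "Run as Java Application" and "Run on Hadoop" shortcuts described in the run tutorial below ultimately launch.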

Binary file removed (image, 866 B)
Binary file removed (image, 1.6 KiB)
Binary file removed (image, 2.5 KiB)
Binary file removed (image, 31 KiB)
Binary file removed (image, 809 B)
Binary file removed (image, 4.0 KiB)
Binary file removed (image, 7.4 KiB)
Binary file removed (image, 1.1 KiB)
Binary file removed (image, 23 KiB)
Binary file removed (image, 790 B)
Binary file removed (image, 1.4 KiB)
Binary file removed (image, 2.1 KiB)
Binary file removed (image, 6.5 KiB)
Binary file removed (image, 3.5 KiB)
Binary file removed (image, 16 KiB)
Binary file removed (image, 777 B)
Binary file removed (image, 1.3 KiB)

@@ -1,121 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<compositeCheatsheet name="IBM MapReduce Tools for Eclipse">
<taskGroup name="Develop Hadoop Applications" kind="set">
<intro
href="http://www.alphaworks.ibm.com/tech/mapreducetools">
IBM(R) MapReduce Tools for Eclipse enables you to write
distributed applications based on the MapReduce paradigm
using the Apache Hadoop runtime. This cheat sheet will walk
you through the steps needed to write a MapReduce
application and run it on a Hadoop server.
</intro>
<onCompletion>
</onCompletion>
<taskGroup name="Initial Setup" kind="sequence" skip="true">
<intro>
This task takes you through the steps to setup the
Hadoop environment with the MapReduce Tools. If you
already have Hadoop installed and linked to Eclipse, you
can skip this task.
</intro>
<onCompletion>
Congratulations! You have now installed Hadoop on your
computer and linked it with the MapReduce Tools.
</onCompletion>
<task kind="cheatsheet"
name="Download and unzip Apache Hadoop distribution">
<intro>
Hadoop must be downloaded to a place where Eclipse
can access its libraries. This task covers the steps
needed to execute this task.
</intro>
<param name="showIntro" value="false" />
<param name="path" value="Setup.xml" />
<onCompletion>
The plugin currently supports Hadoop v0.7.2 through
0.12.2. Now click on the top-most link that you feel
comfortable installing.
</onCompletion>
</task>
<task kind="cheatsheet"
name="Specify path to Apache Hadoop distribution">
...
<intro>
This tutorial informs you how to set the default
Hadoop directory for the plugin.
</intro>
<param name="showIntro" value="false" />
<param name="path" value="SetHadoopPath.xml" />
</task>
</taskGroup>
<taskGroup name="Create and run a MapReduce project"
kind="sequence" skip="true">
<intro>
This section walks you through the steps to create and
run your MapReduce project.
</intro>
<task kind="cheatsheet" name="Create a MapReduce project"
skip="true">
<intro>
This tutorial guides you through the creation of a
simple MapReduce project with three MapReduce
classes: a Mapper, a Reducer, and a Driver.
</intro>
<param name="showIntro" value="false" />
<param name="path" value="CreateProj.xml" />
<onCompletion>
Congratulations! You have now mastered the steps for
creating a Hadoop project.
</onCompletion>
</task>
<task kind="cheatsheet"
name="Run a MapReduce application">
<param name="path" value="RunProj.xml" />
<onCompletion>
Congratulations! You have now mastered the steps for
implementing a Hadoop application.
</onCompletion>
</task>
</taskGroup>
<taskGroup name="Using a MapReduce cluster" kind="set"
skip="true">
<intro>
The MapReduce Tools for Eclipse plugin lets you
browse and upload files to the DFS of a MapReduce cluster.
</intro>
<onCompletion>
Congratulations! You have completed the tutorials on using a
MapReduce Cluster.
</onCompletion>
<task kind="cheatsheet"
name="Connect to a MapReduce cluster" skip="true">
<intro>
This tutorial explains how to show files in the DFS of a
MapReduce cluster.
</intro>
<param name="showIntro" value="false" />
<param name="path" value="ConnectDFS.xml" />
</task>
<task kind="cheatsheet" id="viewFiles"
name="Viewing file contents on the Hadoop Distributed File System (HDFS)">
<intro>
Simply double-click on any file in the DFS in the Project
Explorer view.
</intro>
</task>
<task kind="cheatsheet"
name="Transfer files to the Hadoop Distributed File System (HDFS)">
<intro>
Right-click on an existing directory in the DFS.<br />
Choose the <b>Import from local directory option.</b>
<br />
Note that files can only be uploaded to the HDFS at this time.
</intro>
</task>
</taskGroup>
</taskGroup>
</compositeCheatsheet>

Binary file removed (image, 6.8 KiB)
Binary file removed (image, 1.0 KiB)

@@ -1,24 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<cheatsheet title="MapReduce project run tutorial">
<intro>
<description>
This tutorial informs you how to run your newly created
MapReduce Project in one of two fashions: locally as a Java
Application, or on a Hadoop Server.
</description>
</intro>
<item title="Run as Java Application">
<description>
To run your MapReduce application locally, right-click on
your Driver class in the Package Explorer and select <b>Run as
/ Java Application</b>.
</description>
</item>
<item title="Run on Hadoop Server">
<description>
To run your MapReduce application on a Hadoop server, right-click on
your Driver class in the Package Explorer and select <b>Run as
/ Run on Hadoop</b>.
</description>
</item>
</cheatsheet>

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<cheatsheet title="Set default Hadoop path tutorial">
<intro>
<description>
This tutorial informs you how to set the default Hadoop
directory for the plugin.
</description>
</intro>
<item title="Open Plugin Preferences window">
<description>
To set the default Hadoop directory, open the plugin
preferences from the menu option
<b>Window > Preferences</b>. <br />
Go to the <b>Hadoop Home Directory</b>
preference, and enter the installation directory there.
Use the following embedded command to open the Preferences
window:
</description>
<action pluginId="org.eclipse.jdt.ui"
class="org.eclipse.ui.internal.OpenPreferencesAction" />
</item>
</cheatsheet>

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<cheatsheet title="Open Browser">
<intro>
<description>This cheat sheet launches a browser to the Hadoop website.</description>
</intro>
<item title="Open Browser">
<description>
Go to http://hadoop.apache.org/core/, and follow
links to download the latest stable distribution of
Hadoop.
Use the following embedded command to launch the Hadoop Web site
in a browser</description>
<command serialization=
"org.eclipse.ui.browser.openBrowser(url=http://hadoop.apache.org/core)"/>
</item>
</cheatsheet>

Binary file removed (image, 661 B)
Binary file removed (image, 5.8 KiB)
Binary file removed (image, 1.0 KiB)
Binary file removed (image, 820 B)
Binary file removed (image, 4.3 KiB)
Binary file removed (image, 1.0 KiB)
Binary file removed (image, 339 B)
Binary file removed (image, 808 B)
Binary file removed (image, 1.4 KiB)
Binary file removed (image, 10 KiB)
Binary file removed (image, 9.2 KiB)
Binary file removed (image, 1.4 KiB)
Binary file removed (image, 1.0 KiB)
Binary file removed (image, 59 B)
Binary file removed (image, 930 B)
Binary file removed (image, 888 B)
Binary file removed (image, 1.0 KiB)
Binary file removed (image, 851 B)
Binary file removed (image, 4.3 KiB)
Binary file removed (image, 456 B)
Binary file removed (image, 6.7 KiB)
Binary file removed (image, 7.0 KiB)
Binary file removed (image, 1.1 KiB)
Binary file removed (image, 988 B)
Binary file removed (image, 853 B)
Binary file removed (image, 4.4 KiB)
Binary file removed (image, 986 B)
Binary file removed (image, 838 B)
Binary file removed (image, 162 B)
Binary file removed (image, 395 B)
Binary file removed (image, 745 B)

@@ -1,77 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import org.apache.hadoop.eclipse.servers.ServerRegistry;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
/**
* The plug-in ID
*/
public static final String PLUGIN_ID = "org.apache.hadoop.eclipse";
/**
* The shared unique instance (singleton)
*/
private static Activator plugin;
/**
* Constructor
*/
public Activator() {
synchronized (Activator.class) {
if (plugin != null) {
// Not a singleton!?
throw new RuntimeException("Activator for " + PLUGIN_ID
+ " is not a singleton");
}
plugin = this;
}
}
/* @inheritDoc */
@Override
public void start(BundleContext context) throws Exception {
super.start(context);
}
/* @inheritDoc */
@Override
public void stop(BundleContext context) throws Exception {
ServerRegistry.getInstance().dispose();
plugin = null;
super.stop(context);
}
/**
* Returns the shared unique instance (singleton)
*
* @return the shared unique instance (singleton)
*/
public static Activator getDefault() {
return plugin;
}
}

@@ -1,45 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.widgets.Display;
/**
* Error dialog helper
*/
public class ErrorMessageDialog {
public static void display(final String title, final String message) {
Display.getDefault().syncExec(new Runnable() {
public void run() {
MessageDialog.openError(Display.getDefault().getActiveShell(),
title, message);
}
});
}
public static void display(Exception e) {
display("An exception has occured!", "Exception description:\n"
+ e.getLocalizedMessage());
}
}

@@ -1,95 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import org.eclipse.debug.ui.IDebugUIConstants;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.ui.IFolderLayout;
import org.eclipse.ui.IPageLayout;
import org.eclipse.ui.IPerspectiveFactory;
import org.eclipse.ui.console.IConsoleConstants;
/**
* Creates links to the new MapReduce-based wizards and views for a MapReduce
* perspective
*
*/
public class HadoopPerspectiveFactory implements IPerspectiveFactory {
public void createInitialLayout(IPageLayout layout) {
layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewDriverWizard");
layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewMapperWizard");
layout
.addNewWizardShortcut("org.apache.hadoop.eclipse.NewReducerWizard");
IFolderLayout left =
layout.createFolder("org.apache.hadoop.eclipse.perspective.left",
IPageLayout.LEFT, 0.2f, layout.getEditorArea());
left.addView("org.eclipse.ui.navigator.ProjectExplorer");
IFolderLayout bottom =
layout.createFolder("org.apache.hadoop.eclipse.perspective.bottom",
IPageLayout.BOTTOM, 0.7f, layout.getEditorArea());
bottom.addView(IPageLayout.ID_PROBLEM_VIEW);
bottom.addView(IPageLayout.ID_TASK_LIST);
bottom.addView(JavaUI.ID_JAVADOC_VIEW);
bottom.addView("org.apache.hadoop.eclipse.view.servers");
bottom.addPlaceholder(JavaUI.ID_SOURCE_VIEW);
bottom.addPlaceholder(IPageLayout.ID_PROGRESS_VIEW);
bottom.addPlaceholder(IConsoleConstants.ID_CONSOLE_VIEW);
bottom.addPlaceholder(IPageLayout.ID_BOOKMARKS);
IFolderLayout right =
layout.createFolder("org.apache.hadoop.eclipse.perspective.right",
IPageLayout.RIGHT, 0.8f, layout.getEditorArea());
right.addView(IPageLayout.ID_OUTLINE);
right.addView("org.eclipse.ui.cheatsheets.views.CheatSheetView");
// right.addView(layout.ID); .. cheat sheet here
layout.addActionSet(IDebugUIConstants.LAUNCH_ACTION_SET);
layout.addActionSet(JavaUI.ID_ACTION_SET);
layout.addActionSet(JavaUI.ID_CODING_ACTION_SET);
layout.addActionSet(JavaUI.ID_ELEMENT_CREATION_ACTION_SET);
layout.addActionSet(IPageLayout.ID_NAVIGATE_ACTION_SET);
layout.addActionSet(JavaUI.ID_SEARCH_ACTION_SET);
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewPackageCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewClassCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewInterfaceCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewEnumCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewAnnotationCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard");
layout
.addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSnippetFileCreationWizard");
layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.folder");
layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.file");
layout
.addNewWizardShortcut("org.eclipse.ui.editors.wizards.UntitledTextFileWizard");
// CheatSheetViewerFactory.createCheatSheetView().setInput("org.apache.hadoop.eclipse.cheatsheet");
}
}
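
The factory itself is contributed through plugin.xml (not shown in this diff).
Assuming the perspective id org.apache.hadoop.eclipse.Perspective (the real id
lives in the manifest), the perspective could also be opened programmatically:

import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.WorkbenchException;

class PerspectiveSwitchSketch {
  // Assumed id; check plugin.xml for the actual value.
  static final String PERSPECTIVE_ID = "org.apache.hadoop.eclipse.Perspective";

  void openMapReducePerspective() throws WorkbenchException {
    PlatformUI.getWorkbench().showPerspective(PERSPECTIVE_ID,
        PlatformUI.getWorkbench().getActiveWorkbenchWindow());
  }
}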

View File

@ -1,252 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.Bundle;
/**
* Icons manager
*/
public class ImageLibrary {
private final Bundle bundle = Activator.getDefault().getBundle();
/**
* Singleton instance
*/
private static volatile ImageLibrary instance = null;
private ISharedImages sharedImages =
PlatformUI.getWorkbench().getSharedImages();
/**
* Where resources (icons, images...) are available in the Bundle
*/
private static final String RESOURCE_DIR = "resources/";
/**
* Public access to image descriptors
*
* @param name
* @return the image descriptor
*/
public static ImageDescriptor get(String name) {
return getInstance().getImageDescriptorByName(name);
}
/**
* Public access to images
*
* @param name
* @return the image
*/
public static Image getImage(String name) {
return getInstance().getImageByName(name);
}
/**
* Singleton access
*
* @return the Image library
*/
public static ImageLibrary getInstance() {
if (instance == null) {
synchronized (ImageLibrary.class) {
if (instance == null)
instance = new ImageLibrary();
}
}
return instance;
}
/**
* Map of registered resources (ImageDescriptor and Image)
*/
private Map<String, ImageDescriptor> descMap =
new HashMap<String, ImageDescriptor>();
private Map<String, Image> imageMap = new HashMap<String, Image>();
/**
* Image library constructor: put image definitions here.
*/
private ImageLibrary() {
/*
* Servers view
*/
newImage("server.view.location.entry", "Elephant-24x24.png");
newImage("server.view.job.entry", "job.gif");
newImage("server.view.action.location.new", "location-new-16x16.png");
newImage("server.view.action.location.edit", "location-edit-16x16.png");
newSharedImage("server.view.action.delete",
ISharedImages.IMG_TOOL_DELETE);
/*
* DFS Browser
*/
newImage("dfs.browser.root.entry", "files.gif");
newImage("dfs.browser.location.entry", "Elephant-16x16.png");
newSharedImage("dfs.browser.folder.entry", ISharedImages.IMG_OBJ_FOLDER);
newSharedImage("dfs.browser.file.entry", ISharedImages.IMG_OBJ_FILE);
// DFS files in editor
newSharedImage("dfs.file.editor", ISharedImages.IMG_OBJ_FILE);
// Actions
newImage("dfs.browser.action.mkdir", "new-folder.png");
newImage("dfs.browser.action.download", "download.png");
newImage("dfs.browser.action.upload_files", "upload.png");
newImage("dfs.browser.action.upload_dir", "upload.png");
newSharedImage("dfs.browser.action.delete",
ISharedImages.IMG_TOOL_DELETE);
newImage("dfs.browser.action.refresh", "refresh.png");
/*
* Wizards
*/
newImage("wizard.mapper.new", "mapwiz.png");
newImage("wizard.reducer.new", "reducewiz.png");
newImage("wizard.driver.new", "driverwiz.png");
newImage("wizard.mapreduce.project.new", "projwiz.png");
}
/**
* Accessor to image descriptors
*
* @param name registered name of the image
* @return the image descriptor, or null if not registered
*/
private ImageDescriptor getImageDescriptorByName(String name) {
return this.descMap.get(name);
}
/**
* Accessor to images
*
* @param name registered name of the image
* @return the image, or null if not registered
*/
private Image getImageByName(String name) {
return this.imageMap.get(name);
}
/**
* Access to platform shared images
*
* @param name name of the shared image ({@link ISharedImages})
* @return the shared image descriptor, or null if unknown
*/
private ImageDescriptor getSharedByName(String name) {
return sharedImages.getImageDescriptor(name);
}
/**
* Load and register a new image. If the image resource does not exist or
* fails to load, a default "error" resource is supplied.
*
* @param name name of the image
* @param filename name of the file containing the image
* @return whether the image has correctly been loaded
*/
private boolean newImage(String name, String filename) {
ImageDescriptor id;
boolean success;
try {
URL fileURL =
FileLocator.find(bundle, new Path(RESOURCE_DIR + filename), null);
id = ImageDescriptor.createFromURL(FileLocator.toFileURL(fileURL));
success = true;
} catch (Exception e) {
e.printStackTrace();
id = ImageDescriptor.getMissingImageDescriptor();
// id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
success = false;
}
descMap.put(name, id);
imageMap.put(name, id.createImage(true));
return success;
}
/**
* Register an image from the workspace shared image pool. If the image
* resource does not exist or fails to load, a default "error" resource is
* supplied.
*
* @param name name of the image
* @param sharedName name of the shared image ({@link ISharedImages})
* @return whether the image has correctly been loaded
*/
private boolean newSharedImage(String name, String sharedName) {
boolean success = true;
ImageDescriptor id = getSharedByName(sharedName);
if (id == null) {
id = ImageDescriptor.getMissingImageDescriptor();
// id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
success = false;
}
descMap.put(name, id);
imageMap.put(name, id.createImage(true));
return success;
}
/**
* Register an image from another plug-in's resources. If the image
* resource does not exist or fails to load, a default "error" resource is
* supplied.
*
* @param name name of the image
* @param pluginId identifier of the plug-in hosting the image
* @param filename path of the image file within that plug-in
* @return whether the image has correctly been loaded
*/
private boolean newPluginImage(String name, String pluginId,
String filename) {
boolean success = true;
ImageDescriptor id =
AbstractUIPlugin.imageDescriptorFromPlugin(pluginId, filename);
if (id == null) {
id = ImageDescriptor.getMissingImageDescriptor();
// id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
success = false;
}
descMap.put(name, id);
imageMap.put(name, id.createImage(true));
return success;
}
}
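
Call sites deal only with registered names, never with bundles or files; a
small sketch using keys defined in the constructor above:

import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;

class IconUsageSketch {
  void decorate() {
    ImageDescriptor wizardIcon = ImageLibrary.get("wizard.mapper.new");
    Image locationIcon = ImageLibrary.getImage("server.view.location.entry");
    // Returned Image instances are cached and shared by the library, so
    // callers should not dispose them.
  }
}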

View File

@ -1,146 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.io.File;
import java.io.FileFilter;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectNature;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
/**
* Class to configure and deconfigure an Eclipse project with the MapReduce
* project nature.
*/
public class MapReduceNature implements IProjectNature {
public static final String ID = "org.apache.hadoop.eclipse.Nature";
private IProject project;
static Logger log = Logger.getLogger(MapReduceNature.class.getName());
/**
* Configures an Eclipse project as a Map/Reduce project by adding the
* Hadoop libraries to a project's classpath.
*/
public void configure() throws CoreException {
String path =
project.getPersistentProperty(new QualifiedName(Activator.PLUGIN_ID,
"hadoop.runtime.path"));
File dir = new File(path);
final ArrayList<File> coreJars = new ArrayList<File>();
dir.listFiles(new FileFilter() {
public boolean accept(File pathname) {
String fileName = pathname.getName();
// get the hadoop core jar without touching test or examples
// older versions of hadoop don't use the word "core" -- eyhung
if ((fileName.indexOf("hadoop") != -1) && (fileName.endsWith("jar"))
&& (fileName.indexOf("test") == -1)
&& (fileName.indexOf("examples") == -1)) {
coreJars.add(pathname);
}
return false; // we don't care what this returns
}
});
File dir2 = new File(path + File.separatorChar + "lib");
if (dir2.exists() && dir2.isDirectory()) {
dir2.listFiles(new FileFilter() {
public boolean accept(File pathname) {
if ((!pathname.isDirectory())
&& (pathname.getName().endsWith("jar"))) {
coreJars.add(pathname);
}
return false; // we don't care what this returns
}
});
}
// Add Hadoop libraries onto classpath
IJavaProject javaProject = JavaCore.create(getProject());
// Bundle bundle = Activator.getDefault().getBundle();
try {
IClasspathEntry[] currentCp = javaProject.getRawClasspath();
IClasspathEntry[] newCp =
new IClasspathEntry[currentCp.length + coreJars.size()];
System.arraycopy(currentCp, 0, newCp, 0, currentCp.length);
final Iterator<File> i = coreJars.iterator();
int count = 0;
while (i.hasNext()) {
// for (int i = 0; i < s_coreJarNames.length; i++) {
final File f = (File) i.next();
// URL url = FileLocator.toFileURL(FileLocator.find(bundle, new
// Path("lib/" + s_coreJarNames[i]), null));
URL url = f.toURI().toURL();
log.finer("hadoop library url.getPath() = " + url.getPath());
newCp[newCp.length - 1 - count] =
JavaCore.newLibraryEntry(new Path(url.getPath()), null, null);
count++;
}
javaProject.setRawClasspath(newCp, new NullProgressMonitor());
} catch (Exception e) {
log.log(Level.SEVERE, "IOException generated in "
+ this.getClass().getCanonicalName(), e);
}
}
/**
* Deconfigure a project from MapReduce status. Currently unimplemented.
*/
public void deconfigure() throws CoreException {
// TODO Auto-generated method stub
}
/**
* Returns the project to which this project nature applies.
*/
public IProject getProject() {
return this.project;
}
/**
* Sets the project to which this nature applies. Used when instantiating
* this project nature runtime.
*/
public void setProject(IProject project) {
this.project = project;
}
}
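
Eclipse instantiates the nature and calls configure() once MapReduceNature.ID
appears in a project's description; a sketch of that step, essentially what
the project wizard below does in performFinish():

import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;

class AddNatureSketch {
  void addMapReduceNature(IProject project) throws CoreException {
    IProjectDescription description = project.getDescription();
    String[] existing = description.getNatureIds();
    String[] natures = new String[existing.length + 1];
    System.arraycopy(existing, 0, natures, 1, existing.length);
    natures[0] = MapReduceNature.ID; // configure() runs when this is applied
    description.setNatureIds(natures);
    project.setDescription(description, new NullProgressMonitor());
  }
}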

View File

@ -1,99 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
/**
* Wizard for creating a new Driver class (a class that runs a MapReduce job).
*
*/
public class NewDriverWizard extends NewElementWizard implements INewWizard,
IRunnableWithProgress {
private NewDriverWizardPage page;
/*
* @Override public boolean performFinish() { }
*/
public void run(IProgressMonitor monitor) {
try {
page.createType(monitor);
} catch (CoreException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public NewDriverWizard() {
setWindowTitle("New MapReduce Driver");
}
@Override
public void init(IWorkbench workbench, IStructuredSelection selection) {
super.init(workbench, selection);
page = new NewDriverWizardPage();
addPage(page);
page.setSelection(selection);
}
/**
* Performs any actions appropriate in response to the user having pressed
* the Finish button, or refuses if finishing now is not permitted.
*/
@Override
public boolean performFinish() {
if (super.performFinish()) {
if (getCreatedElement() != null) {
selectAndReveal(page.getModifiedResource());
openResource((IFile) page.getModifiedResource());
}
return true;
} else {
return false;
}
}
@Override
protected void finishPage(IProgressMonitor monitor)
throws InterruptedException, CoreException {
this.run(monitor);
}
@Override
public IJavaElement getCreatedElement() {
return (page.getCreatedType() == null) ? null : page.getCreatedType()
.getPrimaryElement();
}
}
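
Wizards like this one are normally contributed through plugin.xml and launched
by the workbench. For completeness, a sketch of opening it directly (the
WizardDialog wiring and the empty selection are assumptions, not part of this
plugin):

import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;

class DriverWizardLauncherSketch {
  void open(Shell shell) {
    NewDriverWizard wizard = new NewDriverWizard();
    // init() creates and adds the wizard page before the dialog opens.
    wizard.init(PlatformUI.getWorkbench(), StructuredSelection.EMPTY);
    new WizardDialog(shell, wizard).open();
  }
}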

View File

@ -1,263 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.io.IOException;
import java.util.ArrayList;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.search.SearchEngine;
import org.eclipse.jdt.ui.IJavaElementSearchConstants;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.dialogs.SelectionDialog;
/**
* Pre-fills the new MapReduce driver class with a template.
*
*/
public class NewDriverWizardPage extends NewTypeWizardPage {
private Button isCreateMapMethod;
private Text reducerText;
private Text mapperText;
private final boolean showContainerSelector;
public NewDriverWizardPage() {
this(true);
}
public NewDriverWizardPage(boolean showContainerSelector) {
super(true, "MapReduce Driver");
this.showContainerSelector = showContainerSelector;
setTitle("MapReduce Driver");
setDescription("Create a new MapReduce driver.");
setImageDescriptor(ImageLibrary.get("wizard.driver.new"));
}
public void setSelection(IStructuredSelection selection) {
initContainerPage(getInitialJavaElement(selection));
initTypePage(getInitialJavaElement(selection));
}
/**
* Creates the new type using the entered field values.
*/
@Override
public void createType(IProgressMonitor monitor) throws CoreException,
InterruptedException {
super.createType(monitor);
}
@Override
protected void createTypeMembers(final IType newType, ImportsManager imports,
final IProgressMonitor monitor) throws CoreException {
super.createTypeMembers(newType, imports, monitor);
imports.addImport("org.apache.hadoop.fs.Path");
imports.addImport("org.apache.hadoop.io.Text");
imports.addImport("org.apache.hadoop.io.IntWritable");
imports.addImport("org.apache.hadoop.mapred.JobClient");
imports.addImport("org.apache.hadoop.mapred.JobConf");
imports.addImport("org.apache.hadoop.mapred.Reducer");
imports.addImport("org.apache.hadoop.mapred.Mapper");
/**
* TODO(jz) - move most code out of the runnable
*/
getContainer().getShell().getDisplay().syncExec(new Runnable() {
public void run() {
String method = "public static void main(String[] args) {\n JobClient client = new JobClient();";
method += "JobConf conf = new JobConf("
+ newType.getFullyQualifiedName() + ".class);\n\n";
method += "// TODO: specify output types\nconf.setOutputKeyClass(Text.class);\nconf.setOutputValueClass(IntWritable.class);\n\n";
method += "// TODO: specify input and output DIRECTORIES (not files)\nconf.setInputPath(new Path(\"src\"));\nconf.setOutputPath(new Path(\"out\"));\n\n";
if (mapperText.getText().length() > 0) {
method += "conf.setMapperClass(" + mapperText.getText()
+ ".class);\n\n";
} else {
method += "// TODO: specify a mapper\nconf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);\n\n";
}
if (reducerText.getText().length() > 0) {
method += "conf.setReducerClass(" + reducerText.getText()
+ ".class);\n\n";
} else {
method += "// TODO: specify a reducer\nconf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);\n\n";
}
method += "client.setConf(conf);\n";
method += "try {\n\tJobClient.runJob(conf);\n} catch (Exception e) {\n"
+ "\te.printStackTrace();\n}\n";
method += "}\n";
try {
newType.createMethod(method, null, false, monitor);
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
}
public void createControl(Composite parent) {
// super.createControl(parent);
initializeDialogUnits(parent);
Composite composite = new Composite(parent, SWT.NONE);
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createContainerControls(composite, 4);
createPackageControls(composite, 4);
createSeparator(composite, 4);
createTypeNameControls(composite, 4);
createSuperClassControls(composite, 4);
createSuperInterfacesControls(composite, 4);
createSeparator(composite, 4);
createMapperControls(composite);
createReducerControls(composite);
if (!showContainerSelector) {
setPackageFragmentRoot(null, false);
setSuperClass("java.lang.Object", false);
setSuperInterfaces(new ArrayList(), false);
}
setControl(composite);
setFocus();
handleFieldChanged(CONTAINER);
// setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
// setSuperInterfaces(Arrays.asList(new String[]{
// "org.apache.hadoop.mapred.Mapper" }), true);
}
@Override
protected void handleFieldChanged(String fieldName) {
super.handleFieldChanged(fieldName);
validate();
}
private void validate() {
if (showContainerSelector) {
updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
} else {
updateStatus(new IStatus[] { fTypeNameStatus, });
}
}
private void createMapperControls(Composite composite) {
this.mapperText = createBrowseClassControl(composite, "Ma&pper:",
"&Browse...", "org.apache.hadoop.mapred.Mapper", "Mapper Selection");
}
private void createReducerControls(Composite composite) {
this.reducerText = createBrowseClassControl(composite, "&Reducer:",
"Browse&...", "org.apache.hadoop.mapred.Reducer", "Reducer Selection");
}
private Text createBrowseClassControl(final Composite composite,
final String string, String browseButtonLabel,
final String baseClassName, final String dialogTitle) {
Label label = new Label(composite, SWT.NONE);
GridData data = new GridData(GridData.FILL_HORIZONTAL);
label.setText(string);
label.setLayoutData(data);
final Text text = new Text(composite, SWT.SINGLE | SWT.BORDER);
GridData data2 = new GridData(GridData.FILL_HORIZONTAL);
data2.horizontalSpan = 2;
text.setLayoutData(data2);
Button browse = new Button(composite, SWT.NONE);
browse.setText(browseButtonLabel);
GridData data3 = new GridData(GridData.FILL_HORIZONTAL);
browse.setLayoutData(data3);
browse.addListener(SWT.Selection, new Listener() {
public void handleEvent(Event event) {
IType baseType;
try {
baseType = getPackageFragmentRoot().getJavaProject().findType(
baseClassName);
// edit this to limit the scope
SelectionDialog dialog = JavaUI.createTypeDialog(
composite.getShell(), new ProgressMonitorDialog(composite
.getShell()), SearchEngine.createHierarchyScope(baseType),
IJavaElementSearchConstants.CONSIDER_CLASSES, false);
dialog.setMessage("&Choose a type:");
dialog.setBlockOnOpen(true);
dialog.setTitle(dialogTitle);
dialog.open();
if ((dialog.getReturnCode() == Window.OK)
&& (dialog.getResult().length > 0)) {
IType type = (IType) dialog.getResult()[0];
text.setText(type.getFullyQualifiedName());
}
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
if (!showContainerSelector) {
label.setEnabled(false);
text.setEnabled(false);
browse.setEnabled(false);
}
return text;
}
}
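
For reference, the template assembled in createTypeMembers expands to a driver
along these lines (a reconstruction, reformatted for readability; WordCount
stands in for the user-chosen class name, with mapper and reducer left at the
identity defaults):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class WordCount {
  public static void main(String[] args) {
    JobClient client = new JobClient();
    JobConf conf = new JobConf(WordCount.class);

    // TODO: specify output types
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    // TODO: specify input and output DIRECTORIES (not files)
    conf.setInputPath(new Path("src"));
    conf.setOutputPath(new Path("out"));

    // TODO: specify a mapper
    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);

    // TODO: specify a reducer
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

    client.setConf(conf);
    try {
      JobClient.runJob(conf);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}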

View File

@ -1,411 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.io.File;
import java.io.FilenameFilter;
import java.lang.reflect.InvocationTargetException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.eclipse.preferences.MapReducePreferencePage;
import org.apache.hadoop.eclipse.preferences.PreferenceConstants;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExecutableExtension;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.core.runtime.SubProgressMonitor;
import org.eclipse.jdt.ui.wizards.NewJavaProjectWizardPage;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.preference.PreferenceDialog;
import org.eclipse.jface.preference.PreferenceManager;
import org.eclipse.jface.preference.PreferenceNode;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.DirectoryDialog;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchWizard;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.dialogs.WizardNewProjectCreationPage;
import org.eclipse.ui.wizards.newresource.BasicNewProjectResourceWizard;
/**
* Wizard for creating a new MapReduce Project
*
*/
public class NewMapReduceProjectWizard extends Wizard implements
IWorkbenchWizard, IExecutableExtension {
static Logger log =
Logger.getLogger(NewMapReduceProjectWizard.class.getName());
private HadoopFirstPage firstPage;
private NewJavaProjectWizardPage javaPage;
public NewDriverWizardPage newDriverPage;
private IConfigurationElement config;
public NewMapReduceProjectWizard() {
setWindowTitle("New MapReduce Project Wizard");
}
public void init(IWorkbench workbench, IStructuredSelection selection) {
}
@Override
public boolean canFinish() {
return firstPage.isPageComplete() && javaPage.isPageComplete()
// && ((!firstPage.generateDriver.getSelection())
// || newDriverPage.isPageComplete()
;
}
@Override
public IWizardPage getNextPage(IWizardPage page) {
// if (page == firstPage
// && firstPage.generateDriver.getSelection()
// )
// {
// return newDriverPage; // if "generate mapper" checked, second page is
// new driver page
// }
// else
// {
IWizardPage answer = super.getNextPage(page);
if (answer == newDriverPage) {
// don't flip to the new driver page unless "generate driver" is checked
return null;
}
return answer;
// }
}
@Override
public IWizardPage getPreviousPage(IWizardPage page) {
if (page == newDriverPage) {
return firstPage; // newDriverPage, if it appears, is the second
// page
} else {
return super.getPreviousPage(page);
}
}
static class HadoopFirstPage extends WizardNewProjectCreationPage
implements SelectionListener {
public HadoopFirstPage() {
super("New Hadoop Project");
setImageDescriptor(ImageLibrary.get("wizard.mapreduce.project.new"));
}
private Link openPreferences;
private Button workspaceHadoop;
private Button projectHadoop;
private Text location;
private Button browse;
private String path;
public String currentPath;
// private Button generateDriver;
@Override
public void createControl(Composite parent) {
super.createControl(parent);
setTitle("MapReduce Project");
setDescription("Create a MapReduce project.");
Group group = new Group((Composite) getControl(), SWT.NONE);
group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
group.setText("Hadoop MapReduce Library Installation Path");
GridLayout layout = new GridLayout(3, true);
layout.marginLeft =
convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
layout.marginRight =
convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
layout.marginTop =
convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
layout.marginBottom =
convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
group.setLayout(layout);
workspaceHadoop = new Button(group, SWT.RADIO);
GridData d =
new GridData(GridData.BEGINNING, GridData.BEGINNING, false, false);
d.horizontalSpan = 2;
workspaceHadoop.setLayoutData(d);
// workspaceHadoop.setText("Use default workbench Hadoop library
// location");
workspaceHadoop.setSelection(true);
updateHadoopDirLabelFromPreferences();
openPreferences = new Link(group, SWT.NONE);
openPreferences
.setText("<a>Configure Hadoop install directory...</a>");
openPreferences.setLayoutData(new GridData(GridData.END,
GridData.CENTER, false, false));
openPreferences.addSelectionListener(this);
projectHadoop = new Button(group, SWT.RADIO);
projectHadoop.setLayoutData(new GridData(GridData.BEGINNING,
GridData.CENTER, false, false));
projectHadoop.setText("Specify Hadoop library location");
location = new Text(group, SWT.SINGLE | SWT.BORDER);
location.setText("");
d = new GridData(GridData.END, GridData.CENTER, true, false);
d.horizontalSpan = 1;
d.widthHint = 250;
d.grabExcessHorizontalSpace = true;
location.setLayoutData(d);
location.setEnabled(false);
browse = new Button(group, SWT.NONE);
browse.setText("Browse...");
browse.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER,
false, false));
browse.setEnabled(false);
browse.addSelectionListener(this);
projectHadoop.addSelectionListener(this);
workspaceHadoop.addSelectionListener(this);
// generateDriver = new Button((Composite) getControl(), SWT.CHECK);
// generateDriver.setText("Generate a MapReduce driver");
// generateDriver.addListener(SWT.Selection, new Listener()
// {
// public void handleEvent(Event event) {
// getContainer().updateButtons(); }
// });
}
@Override
public boolean isPageComplete() {
boolean validHadoop = validateHadoopLocation();
if (!validHadoop && isCurrentPage()) {
setErrorMessage("Invalid Hadoop Runtime specified; please click 'Configure Hadoop install directory' or fill in library location input field");
} else {
setErrorMessage(null);
}
return super.isPageComplete() && validHadoop;
}
private boolean validateHadoopLocation() {
FilenameFilter gotHadoopJar = new FilenameFilter() {
public boolean accept(File dir, String name) {
return (name.startsWith("hadoop") && name.endsWith(".jar")
&& (name.indexOf("test") == -1) && (name.indexOf("examples") == -1));
}
};
if (workspaceHadoop.getSelection()) {
this.currentPath = path;
return new Path(path).toFile().exists()
&& (new Path(path).toFile().list(gotHadoopJar).length > 0);
} else {
this.currentPath = location.getText();
File file = new Path(location.getText()).toFile();
return file.exists()
&& (new Path(location.getText()).toFile().list(gotHadoopJar).length > 0);
}
}
private void updateHadoopDirLabelFromPreferences() {
path =
Activator.getDefault().getPreferenceStore().getString(
PreferenceConstants.P_PATH);
if ((path != null) && (path.length() > 0)) {
workspaceHadoop.setText("Use default Hadoop");
} else {
workspaceHadoop.setText("Use default Hadoop (currently not set)");
}
}
public void widgetDefaultSelected(SelectionEvent e) {
}
public void widgetSelected(SelectionEvent e) {
if (e.getSource() == openPreferences) {
PreferenceManager manager = new PreferenceManager();
manager.addToRoot(new PreferenceNode(
"Hadoop Installation Directory", new MapReducePreferencePage()));
PreferenceDialog dialog =
new PreferenceDialog(this.getShell(), manager);
dialog.create();
dialog.setMessage("Select Hadoop Installation Directory");
dialog.setBlockOnOpen(true);
dialog.open();
updateHadoopDirLabelFromPreferences();
} else if (e.getSource() == browse) {
DirectoryDialog dialog = new DirectoryDialog(this.getShell());
dialog
.setMessage("Select a hadoop installation, containing hadoop-X-core.jar");
dialog.setText("Select Hadoop Installation Directory");
String directory = dialog.open();
if (directory != null) {
location.setText(directory);
if (!validateHadoopLocation()) {
setErrorMessage("No Hadoop jar found in specified directory");
} else {
setErrorMessage(null);
}
}
} else if (projectHadoop.getSelection()) {
location.setEnabled(true);
browse.setEnabled(true);
} else {
location.setEnabled(false);
browse.setEnabled(false);
}
getContainer().updateButtons();
}
}
@Override
public void addPages() {
/*
* firstPage = new HadoopFirstPage(); addPage(firstPage ); addPage( new
* JavaProjectWizardSecondPage(firstPage) );
*/
firstPage = new HadoopFirstPage();
javaPage =
new NewJavaProjectWizardPage(ResourcesPlugin.getWorkspace()
.getRoot(), firstPage);
// newDriverPage = new NewDriverWizardPage(false);
// newDriverPage.setPageComplete(false); // ensure finish button
// initially disabled
addPage(firstPage);
addPage(javaPage);
// addPage(newDriverPage);
}
@Override
public boolean performFinish() {
try {
PlatformUI.getWorkbench().getProgressService().runInUI(
this.getContainer(), new IRunnableWithProgress() {
public void run(IProgressMonitor monitor) {
try {
monitor.beginTask("Create Hadoop Project", 300);
javaPage.getRunnable().run(
new SubProgressMonitor(monitor, 100));
// if( firstPage.generateDriver.getSelection())
// {
// newDriverPage.setPackageFragmentRoot(javaPage.getNewJavaProject().getAllPackageFragmentRoots()[0],
// false);
// newDriverPage.getRunnable().run(new
// SubProgressMonitor(monitor,100));
// }
IProject project =
javaPage.getNewJavaProject().getResource().getProject();
IProjectDescription description = project.getDescription();
String[] existingNatures = description.getNatureIds();
String[] natures = new String[existingNatures.length + 1];
for (int i = 0; i < existingNatures.length; i++) {
natures[i + 1] = existingNatures[i];
}
natures[0] = MapReduceNature.ID;
description.setNatureIds(natures);
project.setPersistentProperty(new QualifiedName(
Activator.PLUGIN_ID, "hadoop.runtime.path"),
firstPage.currentPath);
project.setDescription(description,
new NullProgressMonitor());
String[] natureIds = project.getDescription().getNatureIds();
for (int i = 0; i < natureIds.length; i++) {
log.fine("Nature id # " + i + " > " + natureIds[i]);
}
monitor.worked(100);
monitor.done();
BasicNewProjectResourceWizard.updatePerspective(config);
} catch (CoreException e) {
// TODO Auto-generated catch block
log.log(Level.SEVERE, "CoreException thrown.", e);
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, null);
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
public void setInitializationData(IConfigurationElement config,
String propertyName, Object data) throws CoreException {
this.config = config;
}
}

View File

@ -1,181 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
/**
* Wizard for creating a new Mapper class (a class that runs the Map portion
* of a MapReduce job). The class is pre-filled with a template.
*
*/
public class NewMapperWizard extends NewElementWizard implements INewWizard,
IRunnableWithProgress {
private Page page;
public NewMapperWizard() {
setWindowTitle("New Mapper");
}
public void run(IProgressMonitor monitor) {
try {
page.createType(monitor);
} catch (CoreException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public void init(IWorkbench workbench, IStructuredSelection selection) {
super.init(workbench, selection);
page = new Page();
addPage(page);
page.setSelection(selection);
}
public static class Page extends NewTypeWizardPage {
private Button isCreateMapMethod;
public Page() {
super(true, "Mapper");
setTitle("Mapper");
setDescription("Create a new Mapper implementation.");
setImageDescriptor(ImageLibrary.get("wizard.mapper.new"));
}
public void setSelection(IStructuredSelection selection) {
initContainerPage(getInitialJavaElement(selection));
initTypePage(getInitialJavaElement(selection));
}
@Override
public void createType(IProgressMonitor monitor) throws CoreException,
InterruptedException {
super.createType(monitor);
}
@Override
protected void createTypeMembers(IType newType, ImportsManager imports,
IProgressMonitor monitor) throws CoreException {
super.createTypeMembers(newType, imports, monitor);
imports.addImport("java.io.IOException");
imports.addImport("org.apache.hadoop.io.WritableComparable");
imports.addImport("org.apache.hadoop.io.Writable");
imports.addImport("org.apache.hadoop.mapred.OutputCollector");
imports.addImport("org.apache.hadoop.mapred.Reporter");
newType
.createMethod(
"public void map(WritableComparable key, Writable values, OutputCollector output, Reporter reporter) throws IOException \n{\n}\n",
null, false, monitor);
}
public void createControl(Composite parent) {
// super.createControl(parent);
initializeDialogUnits(parent);
Composite composite = new Composite(parent, SWT.NONE);
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createContainerControls(composite, 4);
createPackageControls(composite, 4);
createSeparator(composite, 4);
createTypeNameControls(composite, 4);
createSuperClassControls(composite, 4);
createSuperInterfacesControls(composite, 4);
// createSeparator(composite, 4);
setControl(composite);
setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
setSuperInterfaces(Arrays
.asList(new String[] { "org.apache.hadoop.mapred.Mapper" }), true);
setFocus();
validate();
}
@Override
protected void handleFieldChanged(String fieldName) {
super.handleFieldChanged(fieldName);
validate();
}
private void validate() {
updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
}
}
@Override
public boolean performFinish() {
if (super.performFinish()) {
if (getCreatedElement() != null) {
openResource((IFile) page.getModifiedResource());
selectAndReveal(page.getModifiedResource());
}
return true;
} else {
return false;
}
}
@Override
protected void finishPage(IProgressMonitor monitor)
throws InterruptedException, CoreException {
this.run(monitor);
}
@Override
public IJavaElement getCreatedElement() {
return (page.getCreatedType() == null) ? null : page.getCreatedType()
.getPrimaryElement();
}
}
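
The template above expands to a stub along these lines (a reconstruction;
MyMapper is a placeholder for the user-chosen name):

import java.io.IOException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class MyMapper extends MapReduceBase implements Mapper {

  public void map(WritableComparable key, Writable values,
      OutputCollector output, Reporter reporter) throws IOException {
  }
}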

View File

@ -1,184 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
/**
* Wizard for creating a new Reducer class (a class that runs the Reduce
* portion of a MapReduce job). The class is pre-filled with a template.
*
*/
public class NewReducerWizard extends NewElementWizard implements
INewWizard, IRunnableWithProgress {
private Page page;
public NewReducerWizard() {
setWindowTitle("New Reducer");
}
public void run(IProgressMonitor monitor) {
try {
page.createType(monitor);
} catch (CoreException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public void init(IWorkbench workbench, IStructuredSelection selection) {
super.init(workbench, selection);
page = new Page();
addPage(page);
page.setSelection(selection);
}
public static class Page extends NewTypeWizardPage {
public Page() {
super(true, "Reducer");
setTitle("Reducer");
setDescription("Create a new Reducer implementation.");
setImageDescriptor(ImageLibrary.get("wizard.reducer.new"));
}
public void setSelection(IStructuredSelection selection) {
initContainerPage(getInitialJavaElement(selection));
initTypePage(getInitialJavaElement(selection));
}
@Override
public void createType(IProgressMonitor monitor) throws CoreException,
InterruptedException {
super.createType(monitor);
}
@Override
protected void createTypeMembers(IType newType, ImportsManager imports,
IProgressMonitor monitor) throws CoreException {
super.createTypeMembers(newType, imports, monitor);
imports.addImport("java.io.IOException");
imports.addImport("org.apache.hadoop.io.WritableComparable");
imports.addImport("org.apache.hadoop.mapred.OutputCollector");
imports.addImport("org.apache.hadoop.mapred.Reporter");
imports.addImport("java.util.Iterator");
newType
.createMethod(
"public void reduce(WritableComparable _key, Iterator values, OutputCollector output, Reporter reporter) throws IOException \n{\n"
+ "\t// replace KeyType with the real type of your key\n"
+ "\tKeyType key = (KeyType) _key;\n\n"
+ "\twhile (values.hasNext()) {\n"
+ "\t\t// replace ValueType with the real type of your value\n"
+ "\t\tValueType value = (ValueType) values.next();\n\n"
+ "\t\t// process value\n" + "\t}\n" + "}\n", null, false,
monitor);
}
public void createControl(Composite parent) {
// super.createControl(parent);
initializeDialogUnits(parent);
Composite composite = new Composite(parent, SWT.NONE);
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createContainerControls(composite, 4);
createPackageControls(composite, 4);
createSeparator(composite, 4);
createTypeNameControls(composite, 4);
createSuperClassControls(composite, 4);
createSuperInterfacesControls(composite, 4);
// createSeparator(composite, 4);
setControl(composite);
setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
setSuperInterfaces(Arrays
.asList(new String[] { "org.apache.hadoop.mapred.Reducer" }), true);
setFocus();
validate();
}
@Override
protected void handleFieldChanged(String fieldName) {
super.handleFieldChanged(fieldName);
validate();
}
private void validate() {
updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
}
}
@Override
public boolean performFinish() {
if (super.performFinish()) {
if (getCreatedElement() != null) {
selectAndReveal(page.getModifiedResource());
openResource((IFile) page.getModifiedResource());
}
return true;
} else {
return false;
}
}
@Override
protected void finishPage(IProgressMonitor monitor)
throws InterruptedException, CoreException {
this.run(monitor);
}
@Override
public IJavaElement getCreatedElement() {
return (page.getCreatedType() == null) ? null : page.getCreatedType()
.getPrimaryElement();
}
}
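
Reconstructed output of the reducer template (MyReducer is a placeholder; the
KeyType/ValueType markers must be replaced before the stub compiles, which is
the template's intent):

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class MyReducer extends MapReduceBase implements Reducer {

  public void reduce(WritableComparable _key, Iterator values,
      OutputCollector output, Reporter reporter) throws IOException {
    // replace KeyType with the real type of your key
    KeyType key = (KeyType) _key;

    while (values.hasNext()) {
      // replace ValueType with the real type of your value
      ValueType value = (ValueType) values.next();

      // process value
    }
  }
}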

View File

@ -1,43 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse;
import java.util.logging.Logger;
/**
* Class to help with debugging properties
*/
public class PropertyTester extends
org.eclipse.core.expressions.PropertyTester {
static Logger log = Logger.getLogger(PropertyTester.class.getName());
public PropertyTester() {
}
public boolean test(Object receiver, String property, Object[] args,
Object expectedValue) {
log.fine("Test property " + property + ", " + receiver.getClass());
return true;
// todo(jz) support test for deployable if module has hadoop nature etc.
}
}

View File

@ -1,478 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.actions;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.dfs.DFSActions;
import org.apache.hadoop.eclipse.dfs.DFSFile;
import org.apache.hadoop.eclipse.dfs.DFSFolder;
import org.apache.hadoop.eclipse.dfs.DFSLocation;
import org.apache.hadoop.eclipse.dfs.DFSLocationsRoot;
import org.apache.hadoop.eclipse.dfs.DFSPath;
import org.eclipse.core.resources.IStorage;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.PlatformObject;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.dialogs.InputDialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.DirectoryDialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.ui.IObjectActionDelegate;
import org.eclipse.ui.IPersistableElement;
import org.eclipse.ui.IStorageEditorInput;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
/**
* Actual implementation of DFS actions
*/
public class DFSActionImpl implements IObjectActionDelegate {
private ISelection selection;
private IWorkbenchPart targetPart;
/** {@inheritDoc} */
public void setActivePart(IAction action, IWorkbenchPart targetPart) {
this.targetPart = targetPart;
}
/** {@inheritDoc} */
public void run(IAction action) {
// Ignore non structured selections
if (!(this.selection instanceof IStructuredSelection))
return;
// operate on the DFS asynchronously to prevent blocking the main UI
final IStructuredSelection ss = (IStructuredSelection) selection;
final String actionId = action.getActionDefinitionId();
Display.getDefault().asyncExec(new Runnable() {
public void run() {
try {
switch (DFSActions.getById(actionId)) {
case DELETE: {
delete(ss);
break;
}
case OPEN: {
open(ss);
break;
}
case MKDIR: {
mkdir(ss);
break;
}
case UPLOAD_FILES: {
uploadFilesToDFS(ss);
break;
}
case UPLOAD_DIR: {
uploadDirectoryToDFS(ss);
break;
}
case REFRESH: {
refresh(ss);
break;
}
case DOWNLOAD: {
downloadFromDFS(ss);
break;
}
case RECONNECT: {
reconnect(ss);
break;
}
case DISCONNECT: {
disconnect(ss);
break;
}
default: {
System.err.printf("Unhandled DFS Action: " + actionId);
break;
}
}
} catch (Exception e) {
e.printStackTrace();
MessageDialog.openError(Display.getDefault().getActiveShell(),
"DFS Action error",
"An error occurred while performing DFS operation: "
+ e.getMessage());
}
}
});
}
/**
* Create a new sub-folder in an existing directory
*
* @param selection
*/
private void mkdir(IStructuredSelection selection) {
List<DFSFolder> folders = filterSelection(DFSFolder.class, selection);
if (folders.size() >= 1) {
DFSFolder folder = folders.get(0);
InputDialog dialog =
new InputDialog(Display.getCurrent().getActiveShell(),
"Create subfolder", "Enter the name of the subfolder", "",
null);
if (dialog.open() == InputDialog.OK)
folder.mkdir(dialog.getValue());
}
}
/**
* Implement the import action (upload files from the current machine to
* HDFS)
*
* @param selection the current selection, expected to contain DFS folders
* @throws InvocationTargetException
* @throws InterruptedException
*/
private void uploadFilesToDFS(IStructuredSelection selection)
throws InvocationTargetException, InterruptedException {
// Ask the user which files to upload
FileDialog dialog =
new FileDialog(Display.getCurrent().getActiveShell(), SWT.OPEN
| SWT.MULTI);
dialog.setText("Select the local files to upload");
dialog.open();
List<File> files = new ArrayList<File>();
for (String fname : dialog.getFileNames())
files.add(new File(dialog.getFilterPath() + File.separator + fname));
// TODO enable upload command only when selection is exactly one folder
List<DFSFolder> folders = filterSelection(DFSFolder.class, selection);
if (folders.size() >= 1)
uploadToDFS(folders.get(0), files);
}
/**
* Implement the import action (upload directory from the current machine
* to HDFS)
*
* @param selection the current selection, expected to contain DFS folders
* @throws InvocationTargetException
* @throws InterruptedException
*/
private void uploadDirectoryToDFS(IStructuredSelection selection)
throws InvocationTargetException, InterruptedException {
// Ask the user which local directory to upload
DirectoryDialog dialog =
new DirectoryDialog(Display.getCurrent().getActiveShell(), SWT.OPEN
| SWT.MULTI);
dialog.setText("Select the local file or directory to upload");
String dirName = dialog.open();
if (dirName == null)
return; // dialog was cancelled
final File dir = new File(dirName);
List<File> files = new ArrayList<File>();
files.add(dir);
// TODO enable upload command only when selection is exactly one folder
final List<DFSFolder> folders =
filterSelection(DFSFolder.class, selection);
if (folders.size() >= 1)
uploadToDFS(folders.get(0), files);
}
private void uploadToDFS(final DFSFolder folder, final List<File> files)
throws InvocationTargetException, InterruptedException {
PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
new IRunnableWithProgress() {
public void run(IProgressMonitor monitor)
throws InvocationTargetException {
int work = 0;
for (File file : files)
work += computeUploadWork(file);
monitor.beginTask("Uploading files to distributed file system",
work);
for (File file : files) {
try {
folder.upload(monitor, file);
} catch (IOException ioe) {
ioe.printStackTrace();
MessageDialog.openError(null,
"Upload files to distributed file system",
"Upload failed.\n" + ioe);
}
}
monitor.done();
// Update the UI
folder.doRefresh();
}
});
}
private void reconnect(IStructuredSelection selection) {
for (DFSLocation location : filterSelection(DFSLocation.class, selection))
location.reconnect();
}
private void disconnect(IStructuredSelection selection) {
if (selection.size() != 1)
return;
Object first = selection.getFirstElement();
if (!(first instanceof DFSLocationsRoot))
return;
DFSLocationsRoot root = (DFSLocationsRoot) first;
root.disconnect();
root.refresh();
}
/**
* Implements the Download action from HDFS to the current machine
*
* @param selection the current selection, expected to contain DFS paths
* @throws InterruptedException
* @throws InvocationTargetException
*/
private void downloadFromDFS(IStructuredSelection selection)
throws InvocationTargetException, InterruptedException {
// Ask the user where to put the downloaded files
DirectoryDialog dialog =
new DirectoryDialog(Display.getCurrent().getActiveShell());
dialog.setText("Copy to local directory");
dialog.setMessage("Copy the selected files and directories from the "
+ "distributed filesystem to a local directory");
String directory = dialog.open();
if (directory == null)
return;
final File dir = new File(directory);
if (!dir.exists())
dir.mkdirs();
if (!dir.isDirectory()) {
MessageDialog.openError(null, "Download to local file system",
"Invalid directory location: \"" + dir + "\"");
return;
}
final List<DFSPath> paths = filterSelection(DFSPath.class, selection);
PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
new IRunnableWithProgress() {
public void run(IProgressMonitor monitor)
throws InvocationTargetException {
int work = 0;
for (DFSPath path : paths)
work += path.computeDownloadWork();
monitor
.beginTask("Downloading files to local file system", work);
for (DFSPath path : paths) {
if (monitor.isCanceled())
return;
try {
path.downloadToLocalDirectory(monitor, dir);
} catch (Exception e) {
// nothing we want to do here
e.printStackTrace();
}
}
monitor.done();
}
});
}
  /**
   * Open the selected DfsPath in the editor window
   *
   * @param selection
   * @throws IOException
   * @throws PartInitException
   * @throws InvocationTargetException
   * @throws InterruptedException
   */
private void open(IStructuredSelection selection) throws IOException,
PartInitException, InvocationTargetException, InterruptedException {
for (DFSFile file : filterSelection(DFSFile.class, selection)) {
IStorageEditorInput editorInput = new DFSFileEditorInput(file);
targetPart.getSite().getWorkbenchWindow().getActivePage().openEditor(
editorInput, "org.eclipse.ui.DefaultTextEditor");
}
}
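  // The file opens in org.eclipse.ui.DefaultTextEditor backed by the
  // read-only DFSFileEditorInput defined below, which streams the file
  // contents directly from HDFS.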
  /**
   * Refresh the selected DfsPath entries
   *
   * @param selection
   */
private void refresh(IStructuredSelection selection) {
for (DFSPath path : filterSelection(DFSPath.class, selection))
path.refresh();
}
private void delete(IStructuredSelection selection) {
List<DFSPath> list = filterSelection(DFSPath.class, selection);
if (list.isEmpty())
return;
    StringBuilder msg = new StringBuilder();
    msg.append("Are you sure you want to delete "
        + "the following files from the distributed file system?\n");
for (DFSPath path : list)
msg.append(path.getPath()).append("\n");
if (MessageDialog.openConfirm(null, "Confirm Delete from DFS", msg
.toString())) {
Set<DFSPath> toRefresh = new HashSet<DFSPath>();
for (DFSPath path : list) {
path.delete();
toRefresh.add(path.getParent());
}
for (DFSPath path : toRefresh) {
path.refresh();
}
}
}
/* @inheritDoc */
public void selectionChanged(IAction action, ISelection selection) {
this.selection = selection;
}
  /**
   * Extract the list of elements of type {@code T} from the structured
   * selection
   *
   * @param clazz the class of T
   * @param selection the structured selection
   * @return the list of elements of type {@code T} it contains
   */
  @SuppressWarnings("unchecked")
  private static <T> List<T> filterSelection(Class<T> clazz,
      IStructuredSelection selection) {
    List<T> list = new ArrayList<T>();
    for (Object obj : selection.toList()) {
      if (clazz.isAssignableFrom(obj.getClass())) {
        // The cast is safe: guarded by the isAssignableFrom check above
        list.add((T) obj);
      }
    }
return list;
}
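  // Usage sketch: the action handlers above call, e.g.,
  //   List<DFSFolder> folders = filterSelection(DFSFolder.class, selection);
  // to keep only the DFSFolder entries of a mixed tree selection.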
private static int computeUploadWork(File file) {
if (file.isDirectory()) {
int contentWork = 1;
for (File child : file.listFiles())
contentWork += computeUploadWork(child);
return contentWork;
} else if (file.isFile()) {
return 1 + (int) (file.length() / 1024);
} else {
return 0;
}
}
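  // Work units are coarse: one unit per directory entry plus roughly one
  // unit per KiB of file content, matching the monitor.worked(1) calls
  // made by DFSFolder.upload() and DFSFile.upload().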
}
/**
* Adapter to allow the viewing of a DfsFile in the Editor window
*/
class DFSFileEditorInput extends PlatformObject implements
IStorageEditorInput {
private DFSFile file;
/**
* Constructor
*
* @param file
*/
DFSFileEditorInput(DFSFile file) {
this.file = file;
}
/* @inheritDoc */
public String getToolTipText() {
return file.toDetailedString();
}
/* @inheritDoc */
public IPersistableElement getPersistable() {
return null;
}
/* @inheritDoc */
public String getName() {
return file.toString();
}
/* @inheritDoc */
public ImageDescriptor getImageDescriptor() {
return ImageLibrary.get("dfs.file.editor");
}
/* @inheritDoc */
public boolean exists() {
return true;
}
/* @inheritDoc */
public IStorage getStorage() throws CoreException {
return file.getIStorage();
}
};


@@ -1,73 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.actions;
import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
import org.apache.hadoop.eclipse.view.servers.ServerView;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
/**
* Editing server properties action
*/
public class EditLocationAction extends Action {
private ServerView serverView;
public EditLocationAction(ServerView serverView) {
this.serverView = serverView;
setText("Edit Hadoop location...");
setImageDescriptor(ImageLibrary.get("server.view.action.location.edit"));
}
@Override
public void run() {
final HadoopServer server = serverView.getSelectedServer();
if (server == null)
return;
WizardDialog dialog = new WizardDialog(null, new Wizard() {
private HadoopLocationWizard page = new HadoopLocationWizard(server);
@Override
public void addPages() {
super.addPages();
setWindowTitle("Edit Hadoop location...");
addPage(page);
}
@Override
public boolean performFinish() {
page.performFinish();
return true;
}
});
dialog.create();
dialog.setBlockOnOpen(true);
dialog.open();
super.run();
}
}


@@ -1,64 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.actions;
import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
/**
* Action corresponding to creating a new MapReduce Server.
*/
public class NewLocationAction extends Action {
public NewLocationAction() {
setText("New Hadoop location...");
setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
}
@Override
public void run() {
WizardDialog dialog = new WizardDialog(null, new Wizard() {
private HadoopLocationWizard page = new HadoopLocationWizard();
@Override
public void addPages() {
super.addPages();
setWindowTitle("New Hadoop location...");
addPage(page);
}
@Override
public boolean performFinish() {
page.performFinish();
return true;
}
});
dialog.create();
dialog.setBlockOnOpen(true);
dialog.open();
super.run();
}
}


@@ -1,76 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.actions;
import java.util.logging.Logger;
import org.apache.hadoop.eclipse.NewDriverWizard;
import org.apache.hadoop.eclipse.NewMapperWizard;
import org.apache.hadoop.eclipse.NewReducerWizard;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.window.Window;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.cheatsheets.ICheatSheetAction;
import org.eclipse.ui.cheatsheets.ICheatSheetManager;
/**
* Action to open a new MapReduce Class.
*/
public class OpenNewMRClassWizardAction extends Action implements
ICheatSheetAction {
static Logger log = Logger.getLogger(OpenNewMRClassWizardAction.class
.getName());
public void run(String[] params, ICheatSheetManager manager) {
if ((params != null) && (params.length > 0)) {
IWorkbench workbench = PlatformUI.getWorkbench();
      INewWizard wizard = getWizard(params[0]);
      // getWizard() returns null for an unknown type name
      if (wizard == null)
        return;
      wizard.init(workbench, new StructuredSelection());
WizardDialog dialog = new WizardDialog(PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getShell(), wizard);
dialog.create();
dialog.open();
      // did the wizard succeed?
notifyResult(dialog.getReturnCode() == Window.OK);
}
}
private INewWizard getWizard(String typeName) {
if (typeName.equals("Mapper")) {
return new NewMapperWizard();
} else if (typeName.equals("Reducer")) {
return new NewReducerWizard();
} else if (typeName.equals("Driver")) {
return new NewDriverWizard();
} else {
      log.severe("Invalid wizard requested: " + typeName);
return null;
}
}
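  // The cheat-sheet XML is expected to pass "Mapper", "Reducer" or "Driver"
  // as params[0]; any other value is logged and yields null, hence the null
  // check before the wizard is initialized.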
}


@@ -1,48 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.actions;
import org.apache.hadoop.eclipse.NewMapReduceProjectWizard;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.window.Window;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
/**
* Action to open a new Map/Reduce project.
*/
public class OpenNewMRProjectAction extends Action {
@Override
public void run() {
IWorkbench workbench = PlatformUI.getWorkbench();
Shell shell = workbench.getActiveWorkbenchWindow().getShell();
NewMapReduceProjectWizard wizard = new NewMapReduceProjectWizard();
wizard.init(workbench, new StructuredSelection());
WizardDialog dialog = new WizardDialog(shell, wizard);
dialog.create();
dialog.open();
// did the wizard succeed?
notifyResult(dialog.getReturnCode() == Window.OK);
}
}


@@ -1,193 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.actions.DFSActionImpl;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.ActionFactory;
import org.eclipse.ui.navigator.CommonActionProvider;
import org.eclipse.ui.navigator.ICommonActionConstants;
import org.eclipse.ui.navigator.ICommonActionExtensionSite;
import org.eclipse.ui.navigator.ICommonMenuConstants;
/**
* Allows the user to delete and refresh items in the DFS tree
*/
public class ActionProvider extends CommonActionProvider {
private static ICommonActionExtensionSite site;
public ActionProvider() {
}
/* @inheritDoc */
@Override
public void init(ICommonActionExtensionSite site) {
if (ActionProvider.site != null) {
System.err.printf("%s: Multiple init()\n", this.getClass()
.getCanonicalName());
return;
}
super.init(site);
ActionProvider.site = site;
}
/* @inheritDoc */
@Override
public void fillActionBars(IActionBars actionBars) {
actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
new DFSAction(DFSActions.DELETE));
actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
new DFSAction(DFSActions.REFRESH));
if (site == null)
return;
if ((site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
&& (((IStructuredSelection) site.getStructuredViewer()
.getSelection()).size() == 1)
&& (((IStructuredSelection) site.getStructuredViewer()
.getSelection()).getFirstElement() instanceof DFSFile)) {
actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
new DFSAction(DFSActions.OPEN));
}
actionBars.updateActionBars();
}
/* @inheritDoc */
@Override
public void fillContextMenu(IMenuManager menu) {
/*
* Actions on multiple selections
*/
menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DFSAction(
DFSActions.DELETE));
menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
DFSActions.REFRESH));
menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
DFSActions.DOWNLOAD));
if (site == null)
return;
ISelection isel = site.getStructuredViewer().getSelection();
if (!(isel instanceof IStructuredSelection))
return;
/*
* Actions on single selections only
*/
IStructuredSelection issel = (IStructuredSelection) isel;
if (issel.size() != 1)
return;
Object element = issel.getFirstElement();
if (element instanceof DFSFile) {
menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
DFSActions.OPEN));
} else if (element instanceof DFSFolder) {
menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
DFSActions.MKDIR));
menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
DFSActions.UPLOAD_FILES));
menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
DFSActions.UPLOAD_DIR));
} else if (element instanceof DFSLocation) {
menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
DFSActions.RECONNECT));
} else if (element instanceof DFSLocationsRoot) {
menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
DFSActions.DISCONNECT));
}
}
/**
* Representation of an action on a DFS entry in the browser
*/
public static class DFSAction extends Action {
private final String id;
private final String title;
public DFSAction(String id, String title) {
this.id = id;
this.title = title;
}
public DFSAction(DFSActions action) {
this.id = action.id;
this.title = action.title;
}
/* @inheritDoc */
@Override
public String getText() {
return this.title;
}
/* @inheritDoc */
@Override
public ImageDescriptor getImageDescriptor() {
return ImageLibrary.get(getActionDefinitionId());
}
/* @inheritDoc */
@Override
public String getActionDefinitionId() {
return id;
}
/* @inheritDoc */
@Override
public void run() {
DFSActionImpl action = new DFSActionImpl();
action.setActivePart(this, PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getActivePage().getActivePart());
action.selectionChanged(this, site.getStructuredViewer()
.getSelection());
action.run(this);
}
/* @inheritDoc */
@Override
public boolean isEnabled() {
return true;
}
}
}


@@ -1,44 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
public enum DFSActions {
  DELETE("Delete"),
  REFRESH("Refresh"),
  DOWNLOAD("Download from DFS..."),
  OPEN("View"),
  MKDIR("Create new directory..."),
  UPLOAD_FILES("Upload files to DFS..."),
  UPLOAD_DIR("Upload directory to DFS..."),
  RECONNECT("Reconnect"),
  DISCONNECT("Disconnect");
final String title;
final String id;
private static final String PREFIX = "dfs.browser.action.";
public static DFSActions getById(String def) {
if (!def.startsWith(PREFIX))
return null;
return valueOf(def.substring(PREFIX.length()).toUpperCase());
}
DFSActions(String title) {
this.title = title;
this.id = PREFIX + this.name().toLowerCase();
}
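  // Id mapping sketch: UPLOAD_FILES has the id
  // "dfs.browser.action.upload_files" and
  // getById("dfs.browser.action.upload_files") maps it back. ActionProvider
  // uses these ids as action definition ids and image keys.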
}


@@ -1,32 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
/**
* Interface to define content entities in the DFS browser
*/
public interface DFSContent {
boolean hasChildren();
DFSContent[] getChildren();
void refresh();
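  // DFSLocationsRoot, DFSLocation and DFSFolder act as composite nodes of
  // the browser tree; DFSFile and DFSMessage are leaves whose getChildren()
  // returns null and hasChildren() returns false.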
}


@@ -1,244 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.eclipse.servers.ServerRegistry;
import org.eclipse.jface.viewers.ILabelProvider;
import org.eclipse.jface.viewers.ILabelProviderListener;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.StructuredViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Display;
/**
* Handles viewing of DFS locations
* <p>
*
* The content handled by this provider is a tree:
*
* <tt>
* <br>DFSLocationsRoot
* <br>\_HadoopServer
* <br>| \_DfsFolder
* <br>| | \_DfsFile
* <br>| \_DfsFolder
* <br>| ...
* <br>\_HadoopServer...
* </tt>
*
* The code should not block here: blocking operations need to be done
* asynchronously so as not to freeze the UI!
*/
public class DFSContentProvider implements ITreeContentProvider,
ILabelProvider {
/**
* The viewer that displays this Tree content
*/
private Viewer viewer;
private StructuredViewer sviewer;
private Map<HadoopServer, DFSContent> rootFolders =
new HashMap<HadoopServer, DFSContent>();
  /**
   * Constructor.
   */
public DFSContentProvider() {
}
private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this);
/*
* ITreeContentProvider implementation
*/
/* @inheritDoc */
public Object[] getChildren(Object parent) {
if (!(parent instanceof DFSContent))
return null;
DFSContent content = (DFSContent) parent;
return content.getChildren();
}
public Object[] test(Object parentElement) {
if (parentElement instanceof DFSLocationsRoot) {
return ServerRegistry.getInstance().getServers().toArray();
} else if (parentElement instanceof HadoopServer) {
final HadoopServer location = (HadoopServer) parentElement;
Object root = rootFolders.get(location);
if (root != null)
return new Object[] { root };
return new Object[] { "Connecting to DFS..." };
} else if (parentElement instanceof DFSFolder) {
DFSFolder folder = (DFSFolder) parentElement;
return folder.getChildren();
}
return new Object[] { "<Unknown DFSContent>" };
}
/* @inheritDoc */
public Object getParent(Object element) {
if (element instanceof DFSPath) {
return ((DFSPath) element).getParent();
} else if (element instanceof HadoopServer) {
return locationsRoot;
}
return null;
}
/* @inheritDoc */
public boolean hasChildren(Object element) {
if (element instanceof DFSContent) {
DFSContent content = (DFSContent) element;
return content.hasChildren();
}
return false;
}
/*
* IStructureContentProvider implementation
*/
/* @inheritDoc */
public Object[] getElements(final Object inputElement) {
return new Object[] { locationsRoot };
// return ServerRegistry.getInstance().getServers().toArray();
}
/*
* ILabelProvider implementation
*/
/* @inheritDoc */
public Image getImage(Object element) {
if (element instanceof DFSLocationsRoot)
return ImageLibrary.getImage("dfs.browser.root.entry");
else if (element instanceof DFSLocation)
return ImageLibrary.getImage("dfs.browser.location.entry");
else if (element instanceof DFSFolder)
return ImageLibrary.getImage("dfs.browser.folder.entry");
else if (element instanceof DFSFile)
return ImageLibrary.getImage("dfs.browser.file.entry");
return null;
}
/* @inheritDoc */
public String getText(Object element) {
if (element instanceof DFSFile)
return ((DFSFile) element).toDetailedString();
return element.toString();
}
/*
* IBaseLabelProvider implementation
*/
/* @inheritDoc */
public void addListener(ILabelProviderListener listener) {
}
/* @inheritDoc */
public void removeListener(ILabelProviderListener listener) {
}
/* @inheritDoc */
public boolean isLabelProperty(Object element, String property) {
return false;
}
/*
* IContentProvider implementation
*/
/* @inheritDoc */
public void dispose() {
}
/* @inheritDoc */
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
this.viewer = viewer;
    if (viewer instanceof StructuredViewer)
      this.sviewer = (StructuredViewer) viewer;
    else
      this.sviewer = null;
}
/*
* Miscellaneous
*/
/**
* Ask the viewer for this content to refresh
*/
void refresh() {
// no display, nothing to update
if (this.viewer == null)
return;
Display.getDefault().asyncExec(new Runnable() {
public void run() {
DFSContentProvider.this.viewer.refresh();
}
});
}
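  // asyncExec is required here because refreshes are triggered from
  // background Jobs (see DFSFolder and DFSLocation) and SWT widgets may
  // only be updated from the UI thread.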
/**
* Ask the viewer to refresh a single element
*
* @param content what to refresh
*/
void refresh(final DFSContent content) {
if (this.sviewer != null) {
Display.getDefault().asyncExec(new Runnable() {
public void run() {
DFSContentProvider.this.sviewer.refresh(content);
}
});
} else {
refresh();
}
}
Viewer getViewer() {
return this.viewer;
}
}


@@ -1,350 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import org.apache.hadoop.eclipse.Activator;
import org.apache.hadoop.eclipse.ErrorMessageDialog;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.eclipse.core.resources.IStorage;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.PlatformObject;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.ui.PlatformUI;
/**
* File handling methods for the DFS
*/
public class DFSFile extends DFSPath implements DFSContent {
protected long length;
protected short replication;
/**
* Constructor to upload a file on the distributed file system
*
* @param parent
* @param path
* @param file
* @param monitor
*/
public DFSFile(DFSPath parent, Path path, File file,
IProgressMonitor monitor) {
super(parent, path);
this.upload(monitor, file);
}
public DFSFile(DFSPath parent, Path path) {
super(parent, path);
try {
FileStatus fs = getDFS().getFileStatus(path);
this.length = fs.getLen();
this.replication = fs.getReplication();
} catch (IOException e) {
e.printStackTrace();
}
}
  /**
   * Open this file for reading from the DFS
   *
   * @return an InputStream for the file
   */
public InputStream open() throws IOException {
return getDFS().open(this.path);
}
  /**
   * Download this file to the local file system. This creates a download
   * status monitor.
   *
   * @param file the local destination file
   * @throws InvocationTargetException
   * @throws InterruptedException
   *
   * @deprecated
   */
public void downloadToLocalFile(final File file)
throws InvocationTargetException, InterruptedException {
PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
new IRunnableWithProgress() {
public void run(IProgressMonitor monitor)
throws InvocationTargetException {
DFSFile.this.downloadToLocalFile(monitor, file);
}
});
}
/* @inheritDoc */
@Override
public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
File dfsPath = new File(this.getPath().toString());
File destination = new File(dir, dfsPath.getName());
if (destination.exists()) {
      boolean answer =
          MessageDialog.openQuestion(null, "Overwrite existing local file?",
              "The file you are attempting to download from the DFS, "
                  + this.getPath()
                  + ", already exists in your local directory as "
                  + destination + ".\nOverwrite the existing file?");
if (!answer)
return;
}
try {
this.downloadToLocalFile(monitor, destination);
} catch (Exception e) {
e.printStackTrace();
MessageDialog.openWarning(null, "Download to local file system",
"Downloading of file \"" + this.path + "\" to local directory \""
+ dir + "\" has failed.\n" + e);
}
}
/**
* Provides a detailed string for this file
*
* @return the string formatted as
* <tt>&lt;filename&gt; (&lt;size&gt;, r&lt;replication&gt;)</tt>
*/
public String toDetailedString() {
final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
int unit = 0;
double l = this.length;
    // Stop at the largest known unit to avoid running past the units array
    while ((l >= 1024.0) && (unit < units.length - 1)) {
unit += 1;
l /= 1024.0;
}
return String.format("%s (%.1f %s, r%d)", super.toString(), l,
units[unit], this.replication);
}
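  // Example (hypothetical name): a 1,536,000-byte file with replication 3
  // renders as "part-00000 (1.5 Mb, r3)".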
/* @inheritDoc */
@Override
public String toString() {
return this.path.toString();
}
/**
* Download the DfsFile to a local file. Use the given monitor to report
* status of operation.
*
* @param monitor the status monitor
* @param file the local file where to put the downloaded file
* @throws InvocationTargetException
*/
public void downloadToLocalFile(IProgressMonitor monitor, File file)
throws InvocationTargetException {
final int taskSize = 1024;
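    // 1 KiB buffer: the monitor advances one unit per buffer read, keeping
    // progress roughly in line with computeDownloadWork() = 1 + length/1024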
monitor.setTaskName("Download file " + this.path);
BufferedOutputStream ostream = null;
DataInputStream istream = null;
try {
istream = getDFS().open(this.path);
ostream = new BufferedOutputStream(new FileOutputStream(file));
int bytes;
byte[] buffer = new byte[taskSize];
while ((bytes = istream.read(buffer)) >= 0) {
if (monitor.isCanceled())
return;
ostream.write(buffer, 0, bytes);
monitor.worked(1);
}
} catch (Exception e) {
throw new InvocationTargetException(e);
} finally {
// Clean all opened resources
if (istream != null) {
try {
istream.close();
} catch (IOException e) {
e.printStackTrace();
// nothing we can do here
}
}
      // ostream may be null if the FileOutputStream could not be created
      if (ostream != null) {
        try {
          ostream.close();
        } catch (IOException e) {
          e.printStackTrace();
          // nothing we can do here
        }
      }
}
}
/**
* Upload a local file to this file on the distributed file system
*
* @param monitor
* @param file
*/
public void upload(IProgressMonitor monitor, File file) {
final int taskSize = 1024;
monitor.setTaskName("Upload file " + this.path);
BufferedInputStream istream = null;
DataOutputStream ostream = null;
try {
istream = new BufferedInputStream(new FileInputStream(file));
ostream = getDFS().create(this.path);
int bytes;
byte[] buffer = new byte[taskSize];
while ((bytes = istream.read(buffer)) >= 0) {
if (monitor.isCanceled())
return;
ostream.write(buffer, 0, bytes);
monitor.worked(1);
}
} catch (Exception e) {
      ErrorMessageDialog.display(String.format(
          "Unable to upload file %s to %s", file, this.path), e
          .getLocalizedMessage());
} finally {
try {
if (istream != null)
istream.close();
} catch (IOException e) {
e.printStackTrace();
// nothing we can do here
}
try {
if (ostream != null)
ostream.close();
} catch (IOException e) {
e.printStackTrace();
// nothing we can do here
}
}
}
/* @inheritDoc */
@Override
public void refresh() {
getParent().refresh();
}
/* @inheritDoc */
@Override
public int computeDownloadWork() {
return 1 + (int) (this.length / 1024);
}
/**
* Creates an adapter for the file to open it in the Editor
*
* @return the IStorage
*/
public IStorage getIStorage() {
return new IStorageAdapter();
}
/**
* IStorage adapter to open the file in the Editor
*/
private class IStorageAdapter extends PlatformObject implements IStorage {
/* @inheritDoc */
public InputStream getContents() throws CoreException {
try {
return DFSFile.this.open();
} catch (IOException ioe) {
throw new CoreException(new Status(Status.ERROR,
Activator.PLUGIN_ID, 0, "Unable to open file \""
+ DFSFile.this.path + "\"", ioe));
}
}
/* @inheritDoc */
public IPath getFullPath() {
return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
}
/* @inheritDoc */
public String getName() {
return DFSFile.this.path.getName();
}
/* @inheritDoc */
public boolean isReadOnly() {
return true;
}
}
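  // The adapter is read-only (isReadOnly() returns true): edits made in
  // the text editor are not written back to HDFS.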
/*
* Implementation of DFSContent
*/
/* @inheritDoc */
public DFSContent[] getChildren() {
return null;
}
/* @inheritDoc */
public boolean hasChildren() {
return false;
}
}


@@ -1,213 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.dialogs.MessageDialog;
/**
 * Local representation of a folder in the DFS.
 *
 * The constructor creates an empty representation of the folder and spawns
 * a thread that will fill it with the folder's contents.
 */
public class DFSFolder extends DFSPath implements DFSContent {
static Logger log = Logger.getLogger(DFSFolder.class.getName());
private DFSContent[] children;
protected DFSFolder(DFSContentProvider provider, HadoopServer location)
throws IOException {
super(provider, location);
}
private DFSFolder(DFSPath parent, Path path) {
super(parent, path);
}
protected void loadDFSFolderChildren() throws IOException {
List<DFSPath> list = new ArrayList<DFSPath>();
for (FileStatus status : getDFS().listStatus(this.getPath())) {
if (status.isDir()) {
list.add(new DFSFolder(this, status.getPath()));
} else {
list.add(new DFSFile(this, status.getPath()));
}
}
this.children = list.toArray(new DFSContent[list.size()]);
}
  /**
   * Upload the given file or directory into this DfsFolder
   *
   * @param monitor the progress monitor
   * @param file the local file or directory to upload
   * @throws IOException
   */
public void upload(IProgressMonitor monitor, final File file)
throws IOException {
if (file.isDirectory()) {
Path filePath = new Path(this.path, file.getName());
getDFS().mkdirs(filePath);
DFSFolder newFolder = new DFSFolder(this, filePath);
monitor.worked(1);
for (File child : file.listFiles()) {
if (monitor.isCanceled())
return;
newFolder.upload(monitor, child);
}
    } else if (file.isFile()) {
      Path filePath = new Path(this.path, file.getName());
      // The DFSFile constructor performs the actual upload
      new DFSFile(this, filePath, file, monitor);
    } else {
      // XXX neither a regular file nor a directory: ignore it
    }
}
/* @inheritDoc */
@Override
public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
if (!dir.exists())
dir.mkdirs();
if (!dir.isDirectory()) {
MessageDialog.openError(null, "Download to local file system",
"Invalid directory location: \"" + dir + "\"");
return;
}
File dfsPath = new File(this.getPath().toString());
File destination = new File(dir, dfsPath.getName());
if (!destination.exists()) {
if (!destination.mkdir()) {
MessageDialog.openError(null, "Download to local directory",
"Unable to create directory " + destination.getAbsolutePath());
return;
}
}
// Download all DfsPath children
for (Object childObj : getChildren()) {
if (childObj instanceof DFSPath) {
((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
monitor.worked(1);
}
}
}
/* @inheritDoc */
@Override
public int computeDownloadWork() {
int work = 1;
for (DFSContent child : getChildren()) {
if (child instanceof DFSPath)
work += ((DFSPath) child).computeDownloadWork();
}
return work;
}
/**
* Create a new sub directory into this directory
*
* @param folderName
*/
public void mkdir(String folderName) {
try {
getDFS().mkdirs(new Path(this.path, folderName));
} catch (IOException ioe) {
ioe.printStackTrace();
}
doRefresh();
}
/*
* Implementation of DFSContent
*/
/* @inheritDoc */
public boolean hasChildren() {
if (this.children == null)
return true;
else
return (this.children.length > 0);
}
/* @inheritDoc */
public DFSContent[] getChildren() {
if (children == null) {
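      // Listing a folder contacts the NameNode, so run it in a background
      // Job and show a placeholder entry until the listing completes
      // (same pattern as DFSLocation.getChildren() below).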
      new Job("Listing DFS folder " + path) {
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
loadDFSFolderChildren();
return Status.OK_STATUS;
} catch (IOException ioe) {
children =
new DFSContent[] { new DFSMessage("Error: "
+ ioe.getLocalizedMessage()) };
return Status.CANCEL_STATUS;
} finally {
// Under all circumstances, update the UI
provider.refresh(DFSFolder.this);
}
}
}.schedule();
return new DFSContent[] { new DFSMessage("Listing folder content...") };
}
return this.children;
}
/* @inheritDoc */
@Override
public void refresh() {
this.children = null;
this.doRefresh();
}
/* @inheritDoc */
@Override
public String toString() {
return String.format("%s (%s)", super.toString(),
this.getChildren().length);
}
}


@@ -1,108 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.io.IOException;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
/**
* DFS Content representation of a HDFS location
*/
public class DFSLocation implements DFSContent {
private final DFSContentProvider provider;
private final HadoopServer location;
private DFSContent rootFolder = null;
DFSLocation(DFSContentProvider provider, HadoopServer server) {
this.provider = provider;
this.location = server;
}
/* @inheritDoc */
@Override
public String toString() {
return this.location.getLocationName();
}
/*
* Implementation of DFSContent
*/
/* @inheritDoc */
public DFSContent[] getChildren() {
if (this.rootFolder == null) {
/*
* DfsFolder constructor might block as it contacts the NameNode: work
* asynchronously here or this will potentially freeze the UI
*/
new Job("Connecting to DFS " + location) {
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
rootFolder = new DFSFolder(provider, location);
return Status.OK_STATUS;
} catch (IOException ioe) {
rootFolder =
new DFSMessage("Error: " + ioe.getLocalizedMessage());
return Status.CANCEL_STATUS;
} finally {
// Under all circumstances, update the UI
provider.refresh(DFSLocation.this);
}
}
}.schedule();
return new DFSContent[] { new DFSMessage("Connecting to DFS "
+ toString()) };
}
return new DFSContent[] { this.rootFolder };
}
/* @inheritDoc */
public boolean hasChildren() {
return true;
}
/* @inheritDoc */
public void refresh() {
this.rootFolder = null;
this.provider.refresh(this);
}
/*
* Actions
*/
/**
* Refresh the location using a new connection
*/
public void reconnect() {
this.refresh();
}
}


@@ -1,150 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
import org.apache.hadoop.eclipse.servers.ServerRegistry;
import org.apache.hadoop.fs.FileSystem;
/**
* Representation of the root element containing all DFS servers. This
* content registers an observer on Hadoop servers so as to update itself
* when servers are updated.
*/
public class DFSLocationsRoot implements DFSContent, IHadoopServerListener {
  /**
   * The content provider this root element belongs to
   */
private final DFSContentProvider provider;
private Map<HadoopServer, DFSLocation> map =
new HashMap<HadoopServer, DFSLocation>();
/**
   * Register a listener to track DFS location updates
*
* @param provider the content provider this content is the root of
*/
DFSLocationsRoot(DFSContentProvider provider) {
this.provider = provider;
ServerRegistry.getInstance().addListener(this);
this.refresh();
}
/*
* Implementation of IHadoopServerListener
*/
/* @inheritDoc */
public synchronized void serverChanged(final HadoopServer location,
final int type) {
switch (type) {
case ServerRegistry.SERVER_STATE_CHANGED: {
this.provider.refresh(map.get(location));
break;
}
case ServerRegistry.SERVER_ADDED: {
DFSLocation dfsLoc = new DFSLocation(provider, location);
map.put(location, dfsLoc);
this.provider.refresh(this);
break;
}
case ServerRegistry.SERVER_REMOVED: {
map.remove(location);
this.provider.refresh(this);
break;
}
}
}
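  // Event flow sketch: SERVER_STATE_CHANGED refreshes only the affected
  // location's subtree, while SERVER_ADDED / SERVER_REMOVED update the map
  // and refresh this root so the list of locations is redrawn.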
/**
* Recompute the map of Hadoop locations
*/
private synchronized void reloadLocations() {
map.clear();
for (HadoopServer location : ServerRegistry.getInstance().getServers())
map.put(location, new DFSLocation(provider, location));
}
/* @inheritDoc */
@Override
public String toString() {
return "DFS Locations";
}
/*
* Implementation of DFSContent
*/
/* @inheritDoc */
public synchronized DFSContent[] getChildren() {
return this.map.values().toArray(new DFSContent[this.map.size()]);
}
/* @inheritDoc */
public boolean hasChildren() {
return (this.map.size() > 0);
}
/* @inheritDoc */
public void refresh() {
reloadLocations();
this.provider.refresh(this);
}
/*
* Actions
*/
public void disconnect() {
Thread closeThread = new Thread() {
/* @inheritDoc */
@Override
public void run() {
try {
System.out.printf("Closing all opened File Systems...\n");
FileSystem.closeAll();
System.out.printf("File Systems closed\n");
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
};
// Wait 5 seconds for the connections to be closed
closeThread.start();
try {
closeThread.join(5000);
} catch (InterruptedException ie) {
// Ignore
}
}
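  // FileSystem.closeAll() may block (e.g. on unreachable locations), so the
  // close runs on a helper thread and the caller waits at most 5 seconds.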
}


@@ -1,57 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
/**
* DFS Content that displays a message.
*/
class DFSMessage implements DFSContent {
private String message;
DFSMessage(String message) {
this.message = message;
}
/* @inheritDoc */
@Override
public String toString() {
return this.message;
}
/*
* Implementation of DFSContent
*/
/* @inheritDoc */
public DFSContent[] getChildren() {
return null;
}
/* @inheritDoc */
public boolean hasChildren() {
return false;
}
/* @inheritDoc */
public void refresh() {
// Nothing to do
}
}


@@ -1,160 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.dfs;
import java.io.File;
import java.io.IOException;
import java.util.logging.Logger;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.eclipse.ErrorMessageDialog;
import org.apache.hadoop.eclipse.server.ConfProp;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.MessageDialog;
/**
* DFS Path handling for DFS
*/
public abstract class DFSPath implements DFSContent {
protected final DFSContentProvider provider;
protected HadoopServer location;
private DistributedFileSystem dfs = null;
protected final Path path;
protected final DFSPath parent;
/**
   * For debugging purposes
*/
static Logger log = Logger.getLogger(DFSPath.class.getName());
  /**
   * Create a path representation for the root of the given location
   *
   * @param provider the content provider this path is displayed in
   * @param location the Hadoop location this path belongs to
   */
public DFSPath(DFSContentProvider provider, HadoopServer location)
throws IOException {
this.provider = provider;
this.location = location;
this.path = new Path("/");
this.parent = null;
}
/**
* Create a sub-path representation for the given parent path
*
* @param parent
* @param path
*/
protected DFSPath(DFSPath parent, Path path) {
this.provider = parent.provider;
this.location = parent.location;
this.dfs = parent.dfs;
this.parent = parent;
this.path = path;
}
protected void dispose() {
// Free the DFS connection
}
/* @inheritDoc */
@Override
public String toString() {
    // Compare string forms: Path.equals() on a String is always false
    if (path.toString().equals("/")) {
return location.getConfProp(ConfProp.FS_DEFAULT_URI);
} else {
return this.path.getName();
}
}
/**
* Does a recursive delete of the remote directory tree at this node.
*/
public void delete() {
try {
getDFS().delete(this.path, true);
} catch (IOException e) {
e.printStackTrace();
MessageDialog.openWarning(null, "Delete file",
"Unable to delete file \"" + this.path + "\"\n" + e);
}
}
public DFSPath getParent() {
return parent;
}
public abstract void refresh();
/**
* Refresh the UI element for this content
*/
public void doRefresh() {
provider.refresh(this);
}
  /**
   * Copy the DfsPath to the given local directory
   *
   * @param monitor the progress monitor
   * @param dir the local directory
   */
public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
File dir);
public Path getPath() {
return this.path;
}
/**
* Gets a connection to the DFS
*
* @return a connection to the DFS
* @throws IOException
*/
DistributedFileSystem getDFS() throws IOException {
if (this.dfs == null) {
FileSystem fs = location.getDFS();
if (!(fs instanceof DistributedFileSystem)) {
        ErrorMessageDialog.display("DFS Browser",
            "The DFS Browser can only browse Distributed File Systems");
        throw new IOException("DFS Browser expects a DistributedFileSystem");
}
this.dfs = (DistributedFileSystem) fs;
}
return this.dfs;
}
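  // The DistributedFileSystem handle is created lazily and cached; child
  // paths share their parent's handle via the copy constructor above.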
public abstract int computeDownloadWork();
}

Some files were not shown because too many files have changed in this diff