HDFS-16473. Make HDFS stat tool cross platform (#4145)

* The source files for hdfs_stat use getopt for parsing the
  command-line arguments.
* getopt is available only on Linux and thus isn't cross-platform.
* We need to replace getopt with boost::program_options to make
  this tool cross-platform (a minimal sketch of the new parsing
  style follows this list).
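
For reference, here is a minimal, self-contained sketch of the boost::program_options pattern that this change adopts. It is illustrative only; the committed parsing lives in the Stat class in hdfs-stat.cc further down, and none of the names below come from the commit itself:

// stat_args_sketch.cc -- hypothetical, standalone example (not part of this commit).
// Shows a single positional "path" argument plus a -h/--help flag parsed with
// boost::program_options instead of getopt, so the same code builds on Windows.
#include <cstdlib>
#include <iostream>
#include <string>

#include <boost/program_options.hpp>

namespace po = boost::program_options;

int main(int argc, char *argv[]) {
  po::options_description desc("Options");
  desc.add_options()
      ("help,h", "Display this help and exit")
      ("path", po::value<std::string>(), "The path for which to display the stat information");

  // Map the first (and only) positional command-line token onto the "path" option.
  po::positional_options_description pos;
  pos.add("path", 1);

  po::variables_map vm;
  po::store(po::command_line_parser(argc, argv).options(desc).positional(pos).run(), vm);
  po::notify(vm);

  if (vm.count("help") > 0 || vm.count("path") == 0) {
    std::cout << "Usage: hdfs_stat PATH\n" << desc << std::endl;
    return vm.count("help") > 0 ? EXIT_SUCCESS : EXIT_FAILURE;
  }

  std::cout << "Would stat: " << vm["path"].as<std::string>() << std::endl;
  return EXIT_SUCCESS;
}

A sketch like this would be built with something like g++ stat_args_sketch.cc -lboost_program_options; the commit wires the real dependency up through Boost::program_options in the new hdfs-stat CMakeLists.txt.
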
Gautham B A 2022-04-08 23:06:39 +05:30 committed by GitHub
parent b69ede7154
commit d5e97fe4d6
10 changed files with 423 additions and 89 deletions

View File

@@ -39,6 +39,7 @@ add_executable(hdfs_tool_tests
hdfs-find-mock.cc
hdfs-ls-mock.cc
hdfs-setrep-mock.cc
hdfs-stat-mock.cc
main.cc)
target_include_directories(hdfs_tool_tests PRIVATE
../tools
@@ -62,6 +63,7 @@ target_include_directories(hdfs_tool_tests PRIVATE
../../tools/hdfs-find
../../tools/hdfs-ls
../../tools/hdfs-setrep
../../tools/hdfs-stat
../../tools/hdfs-cat)
target_link_libraries(hdfs_tool_tests PRIVATE
gmock_main
@@ -84,5 +86,6 @@ target_link_libraries(hdfs_tool_tests PRIVATE
hdfs_find_lib
hdfs_ls_lib
hdfs_setrep_lib
hdfs_stat_lib
hdfs_cat_lib)
add_test(hdfs_tool_tests hdfs_tool_tests)

View File

@@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <functional>
#include <memory>
#include <string>
#include <vector>

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include "hdfs-stat-mock.h"
#include "hdfs-tool-tests.h"

namespace hdfs::tools::test {
StatMock::~StatMock() = default;

void StatMock::SetExpectations(
    std::function<std::unique_ptr<StatMock>()> test_case,
    const std::vector<std::string> &args) const {
  // Get the pointer to the function that defines the test case
  const auto test_case_func =
      test_case.target<std::unique_ptr<StatMock> (*)()>();
  ASSERT_NE(test_case_func, nullptr);

  // Set the expected method calls and their corresponding arguments for each
  // test case
  if (*test_case_func == &CallHelp<StatMock>) {
    EXPECT_CALL(*this, HandleHelp()).Times(1).WillOnce(testing::Return(true));
    return;
  }

  if (*test_case_func == &PassAPath<StatMock>) {
    const auto path = args[0];
    EXPECT_CALL(*this, HandlePath(path))
        .Times(1)
        .WillOnce(testing::Return(true));
  }
}
} // namespace hdfs::tools::test

View File

@@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LIBHDFSPP_TOOLS_HDFS_STAT_MOCK
#define LIBHDFSPP_TOOLS_HDFS_STAT_MOCK

#include <functional>
#include <memory>
#include <string>
#include <vector>

#include <gmock/gmock.h>

#include "hdfs-stat.h"

namespace hdfs::tools::test {
/**
 * {@class StatMock} is a {@class Stat} that mocks the HandleHelp and
 * HandlePath methods for testing their functionality.
 */
class StatMock : public hdfs::tools::Stat {
public:
  /**
   * {@inheritdoc}
   */
  StatMock(const int argc, char **argv) : Stat(argc, argv) {}

  // Abiding by the Rule of 5
  StatMock(const StatMock &) = delete;
  StatMock(StatMock &&) = delete;
  StatMock &operator=(const StatMock &) = delete;
  StatMock &operator=(StatMock &&) = delete;
  ~StatMock() override;

  /**
   * Defines the methods and the corresponding arguments that are expected
   * to be called on this instance of {@link HdfsTool} for the given test case.
   *
   * @param test_case An {@link std::function} object that points to the
   * function defining the test case
   * @param args The arguments that are passed to this test case
   */
  void SetExpectations(std::function<std::unique_ptr<StatMock>()> test_case,
                       const std::vector<std::string> &args = {}) const;

  MOCK_METHOD(bool, HandleHelp, (), (const, override));
  MOCK_METHOD(bool, HandlePath, (const std::string &), (const, override));
};
} // namespace hdfs::tools::test

#endif

View File

@@ -39,6 +39,7 @@
#include "hdfs-rename-snapshot-mock.h"
#include "hdfs-rm-mock.h"
#include "hdfs-setrep-mock.h"
#include "hdfs-stat-mock.h"
#include "hdfs-tool-test-fixtures.h"
#include "hdfs-tool-tests.h"
@@ -162,6 +163,11 @@ INSTANTIATE_TEST_SUITE_P(
    testing::Values(CallHelp<hdfs::tools::test::SetrepMock>,
                    PassPermissionsAndAPath<hdfs::tools::test::SetrepMock>));

INSTANTIATE_TEST_SUITE_P(
    HdfsStat, HdfsToolBasicTest,
    testing::Values(CallHelp<hdfs::tools::test::StatMock>,
                    PassAPath<hdfs::tools::test::StatMock>));

// Negative tests
INSTANTIATE_TEST_SUITE_P(
    HdfsAllowSnapshot, HdfsToolNegativeTestThrows,
@@ -265,6 +271,17 @@ INSTANTIATE_TEST_SUITE_P(
                    PassMOpt<hdfs::tools::test::SetrepMock>,
                    PassNOpt<hdfs::tools::test::SetrepMock>));

INSTANTIATE_TEST_SUITE_P(
    HdfsStat, HdfsToolNegativeTestThrows,
    testing::Values(Pass2Paths<hdfs::tools::test::StatMock>,
                    Pass3Paths<hdfs::tools::test::StatMock>,
                    PassRecursiveOwnerAndAPath<hdfs::tools::test::StatMock>,
                    PassRecursive<hdfs::tools::test::StatMock>,
                    PassRecursivePath<hdfs::tools::test::StatMock>,
                    PassMPOptsPermissionsAndAPath<hdfs::tools::test::StatMock>,
                    PassMOpt<hdfs::tools::test::StatMock>,
                    PassNOpt<hdfs::tools::test::StatMock>));

INSTANTIATE_TEST_SUITE_P(
    HdfsRm, HdfsToolNegativeTestNoThrow,
    testing::Values(PassRecursive<hdfs::tools::test::RmMock>));

View File

@@ -49,8 +49,7 @@ add_subdirectory(hdfs-rm)
add_subdirectory(hdfs-ls)
add_executable(hdfs_stat hdfs_stat.cc)
target_link_libraries(hdfs_stat tools_common hdfspp_static)
add_subdirectory(hdfs-stat)
add_subdirectory(hdfs-count)

View File

@@ -0,0 +1,27 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
add_library(hdfs_stat_lib STATIC $<TARGET_OBJECTS:hdfs_tool_obj> hdfs-stat.cc)
target_include_directories(hdfs_stat_lib PRIVATE ../../tools ${Boost_INCLUDE_DIRS})
target_link_libraries(hdfs_stat_lib PRIVATE Boost::boost Boost::program_options tools_common hdfspp_static)
add_executable(hdfs_stat main.cc)
target_include_directories(hdfs_stat PRIVATE ../../tools)
target_link_libraries(hdfs_stat PRIVATE hdfs_stat_lib)
install(TARGETS hdfs_stat RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,111 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <future>
#include <iostream>
#include <memory>
#include <ostream>
#include <sstream>
#include <string>

#include "hdfs-stat.h"
#include "tools_common.h"

namespace hdfs::tools {
Stat::Stat(const int argc, char **argv) : HdfsTool(argc, argv) {}

bool Stat::Initialize() {
  auto add_options = opt_desc_.add_options();
  add_options("help,h", "Displays the stat information for the given path. The "
                        "path can be a file or a directory.");
  add_options("path", po::value<std::string>(),
              "The path in the filesystem for which to display the "
              "stat information.");

  // We allow only one positional argument to be passed to this tool. An
  // exception is thrown if multiple arguments are passed.
  pos_opt_desc_.add("path", 1);

  po::store(po::command_line_parser(argc_, argv_)
                .options(opt_desc_)
                .positional(pos_opt_desc_)
                .run(),
            opt_val_);
  po::notify(opt_val_);
  return true;
}

std::string Stat::GetDescription() const {
  std::stringstream desc;
  desc << "Usage: hdfs_stat PATH" << std::endl
       << std::endl
       << "Displays the stat information for the given path." << std::endl
       << "The path can be a file or a directory." << std::endl
       << "Examples:" << std::endl
       << "hdfs_stat hdfs://localhost.localdomain:8020/dir/file" << std::endl;
  return desc.str();
}

bool Stat::Do() {
  if (!Initialize()) {
    std::cerr << "Unable to initialize HDFS stat tool" << std::endl;
    return false;
  }

  if (!ValidateConstraints()) {
    std::cout << GetDescription();
    return false;
  }

  if (opt_val_.count("help") > 0) {
    return HandleHelp();
  }

  if (opt_val_.count("path") > 0) {
    const auto path = opt_val_["path"].as<std::string>();
    return HandlePath(path);
  }

  return false;
}

bool Stat::HandleHelp() const {
  std::cout << GetDescription();
  return true;
}

bool Stat::HandlePath(const std::string &path) const {
  // Build a URI object from the given path
  auto uri = hdfs::parse_path_or_exit(path);

  const auto fs = hdfs::doConnect(uri, false);
  if (!fs) {
    std::cerr << "Could not connect to the file system." << std::endl;
    return false;
  }

  hdfs::StatInfo stat_info;
  const auto status = fs->GetFileInfo(uri.get_path(), stat_info);
  if (!status.ok()) {
    std::cerr << "Error: " << status.ToString() << std::endl;
    return false;
  }

  std::cout << stat_info.str() << std::endl;
  return true;
}
} // namespace hdfs::tools

View File

@@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LIBHDFSPP_TOOLS_HDFS_STAT
#define LIBHDFSPP_TOOLS_HDFS_STAT

#include <string>

#include <boost/program_options.hpp>

#include "hdfs-tool.h"

namespace hdfs::tools {
/**
 * {@class Stat} is an {@class HdfsTool} that displays the stat information for
 * the given path. The path can be a file or a directory.
 */
class Stat : public HdfsTool {
public:
  /**
   * {@inheritdoc}
   */
  Stat(int argc, char **argv);

  // Abiding by the Rule of 5
  Stat(const Stat &) = default;
  Stat(Stat &&) = default;
  Stat &operator=(const Stat &) = delete;
  Stat &operator=(Stat &&) = delete;
  ~Stat() override = default;

  /**
   * {@inheritdoc}
   */
  [[nodiscard]] std::string GetDescription() const override;

  /**
   * {@inheritdoc}
   */
  [[nodiscard]] bool Do() override;

protected:
  /**
   * {@inheritdoc}
   */
  [[nodiscard]] bool Initialize() override;

  /**
   * {@inheritdoc}
   */
  [[nodiscard]] bool ValidateConstraints() const override { return argc_ > 1; }

  /**
   * {@inheritdoc}
   */
  [[nodiscard]] bool HandleHelp() const override;

  /**
   * Handle the path argument that's passed to this tool.
   *
   * @param path The path to the file or directory for which we need the stat
   * info.
   *
   * @return A boolean indicating the result of this operation.
   */
  [[nodiscard]] virtual bool HandlePath(const std::string &path) const;

private:
  /**
   * A boost data-structure containing the description of positional arguments
   * passed to the command line.
   */
  po::positional_options_description pos_opt_desc_;
};
} // namespace hdfs::tools

#endif

View File

@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstdlib>
#include <exception>
#include <iostream>

#include <google/protobuf/stubs/common.h>

#include "hdfs-stat.h"

int main(int argc, char *argv[]) {
  const auto result = std::atexit([]() -> void {
    // Clean up static data on exit and prevent valgrind memory leaks
    google::protobuf::ShutdownProtobufLibrary();
  });
  if (result != 0) {
    std::cerr << "Error: Unable to schedule the clean-up tasks for HDFS stat "
                 "tool, exiting"
              << std::endl;
    std::exit(EXIT_FAILURE);
  }

  hdfs::tools::Stat stat(argc, argv);
  auto success = false;

  try {
    success = stat.Do();
  } catch (const std::exception &e) {
    std::cerr << "Error: " << e.what() << std::endl;
  }

  if (!success) {
    std::exit(EXIT_FAILURE);
  }
  return 0;
}

View File

@@ -1,87 +0,0 @@
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
#include <google/protobuf/stubs/common.h>
#include <unistd.h>
#include "tools_common.h"
void usage(){
  std::cout << "Usage: hdfs_rm [OPTION] FILE"
      << std::endl
      << std::endl << "Display FILE status."
      << std::endl
      << std::endl << " -h display this help and exit"
      << std::endl
      << std::endl << "Examples:"
      << std::endl << "hdfs_rm hdfs://localhost.localdomain:8020/dir/file"
      << std::endl << "hdfs_rm -R /dir1/dir2"
      << std::endl;
}

int main(int argc, char *argv[]) {
  //We should have at least 2 arguments
  if (argc < 2) {
    usage();
    exit(EXIT_FAILURE);
  }

  int input;

  //Using GetOpt to read in the values
  opterr = 0;
  while ((input = getopt(argc, argv, "h")) != -1) {
    switch (input)
    {
    case 'h':
      usage();
      exit(EXIT_SUCCESS);
    case '?':
      if (isprint(optopt))
        std::cerr << "Unknown option `-" << (char) optopt << "'." << std::endl;
      else
        std::cerr << "Unknown option character `" << (char) optopt << "'." << std::endl;
      usage();
      exit(EXIT_FAILURE);
    default:
      exit(EXIT_FAILURE);
    }
  }

  std::string uri_path = argv[optind];

  //Building a URI object from the given uri_path
  hdfs::URI uri = hdfs::parse_path_or_exit(uri_path);

  std::shared_ptr<hdfs::FileSystem> fs = hdfs::doConnect(uri, false);
  if (!fs) {
    std::cerr << "Could not connect the file system. " << std::endl;
    exit(EXIT_FAILURE);
  }

  hdfs::StatInfo stat_info;
  hdfs::Status status = fs->GetFileInfo(uri.get_path(), stat_info);
  if (!status.ok()) {
    std::cerr << "Error: " << status.ToString() << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << stat_info.str() << std::endl;

  // Clean up static data and prevent valgrind memory leaks
  google::protobuf::ShutdownProtobufLibrary();
  return 0;
}