Merge branch 'master' into dstu2_final

This commit is contained in:
James Agnew 2015-07-30 22:34:45 -04:00
commit d9d192cc04
81 changed files with 4908 additions and 128 deletions

View File

@ -4,7 +4,7 @@ hapi-fhir
HAPI FHIR - Java API for HL7 FHIR Clients and Servers
[![Build Status](https://travis-ci.org/jamesagnew/hapi-fhir.svg?branch=master)](https://travis-ci.org/jamesagnew/hapi-fhir)
[![Coverage Status](https://coveralls.io/repos/jamesagnew/hapi-fhir/badge.svg?branch=master)](https://coveralls.io/r/jamesagnew/hapi-fhir?branch=master)
[![Coverage Status](https://coveralls.io/repos/jamesagnew/hapi-fhir/badge.svg?branch=master&service=github)](https://coveralls.io/github/jamesagnew/hapi-fhir?branch=master)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/ca.uhn.hapi.fhir/hapi-fhir-base/badge.svg)](http://search.maven.org/#search|ga|1|ca.uhn.hapi.fhir)
Complete project documentation is available here:

View File

@ -269,6 +269,7 @@ public class GenericClientExample {
.encodedJson()
.where(Patient.BIRTHDATE.beforeOrEquals().day("2012-01-22"))
.and(Patient.BIRTHDATE.after().day("2011-01-01"))
.withTag("http://acme.org/codes", "needs-review")
.include(Patient.INCLUDE_ORGANIZATION)
.revInclude(Provenance.INCLUDE_TARGET)
.lastUpdated(new DateRangeParam("2011-01-01", null))

View File

@ -23,7 +23,7 @@
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<version>6.1.26</version>
<version>6.1.26</version> <!-- 6.1.26 -->
</dependency>
<dependency>
<groupId>org.slf4j</groupId>

View File

@ -129,6 +129,7 @@ import ca.uhn.fhir.rest.method.ValidateMethodBindingDstu1;
import ca.uhn.fhir.rest.method.ValidateMethodBindingDstu2;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.IVersionSpecificBundleFactory;
@ -1556,6 +1557,10 @@ public class GenericClient extends BaseClient implements IGenericClient {
myCriterion.populateParamList(params);
for (TokenParam next : myTags) {
addParam(params, Constants.PARAM_TAG, next.getValueAsQueryToken());
}
for (Include next : myInclude) {
addParam(params, Constants.PARAM_INCLUDE, next.getValue());
}
@ -1689,6 +1694,15 @@ public class GenericClient extends BaseClient implements IGenericClient {
return this;
}
// Tag criteria accumulated by withTag(); each entry is later serialized as its own _tag parameter.
private List<TokenParam> myTags = new ArrayList<TokenParam>();

/**
 * Adds a <code>_tag</code> criterion to this search.
 *
 * @param theSystem the tag system, may be null to match any system
 * @param theCode   the tag code, must not be null or empty
 */
@Override
public IQuery<Object> withTag(String theSystem, String theCode) {
	Validate.notBlank(theCode, "theCode must not be null or empty");
	TokenParam tagCriterion = new TokenParam(theSystem, theCode);
	myTags.add(tagCriterion);
	return this;
}
}
@SuppressWarnings("rawtypes")

View File

@ -251,6 +251,12 @@ public interface IGenericClient extends IRestfulClient {
@Override
void registerInterceptor(IClientInterceptor theInterceptor);
/**
* Search for resources matching a given set of criteria. Searching is a very powerful
feature in FHIR with many options for specifying exactly what should be searched for
* and how it should be returned. See the <a href="http://www.hl7.org/fhir/search.html">specification on search</a>
* for more information.
*/
IUntypedQuery search();
/**

View File

@ -37,6 +37,14 @@ public interface IQuery<T> extends IClientExecutable<IQuery<T>, T>, IBaseQuery<I
IQuery<T> limitTo(int theLimitTo);
/**
* Match only resources where the resource has the given tag. This parameter corresponds to
* the <code>_tag</code> URL parameter.
* @param theSystem The tag code system, or <code>null</code> to match any code system (this may not be supported on all servers)
* @param theCode The tag code. Must not be <code>null</code> or empty.
*/
IQuery<T> withTag(String theSystem, String theCode);
/**
* Forces the query to perform the search using the given method (allowable methods are described in the
* <a href="http://www.hl7.org/implement/standards/fhir/http.html#search">FHIR Specification Section 2.1.11</a>)

View File

@ -110,6 +110,7 @@ public class Constants {
public static final String PARAM_SORT_ASC = "_sort:asc";
public static final String PARAM_SORT_DESC = "_sort:desc";
public static final String PARAM_TAGS = "_tags";
public static final String PARAM_TAG = "_tag";
public static final String PARAM_VALIDATE = "_validate";
public static final String PARAMQUALIFIER_MISSING = ":missing";
public static final String PARAMQUALIFIER_MISSING_FALSE = "false";

View File

@ -35,21 +35,16 @@ public class IncomingRequestAddressStrategy implements IServerAddressStrategy {
@Override
public String determineServerBase(ServletContext theServletContext, HttpServletRequest theRequest) {
String requestFullPath = StringUtils.defaultString(theRequest.getRequestURI());
String servletPath;
if (myServletPath != null) {
servletPath = myServletPath;
} else {
servletPath = StringUtils.defaultString(theRequest.getServletPath());
}
StringBuffer requestUrl = theRequest.getRequestURL();
String servletContextPath = "";
if (theServletContext != null) {
servletContextPath = StringUtils.defaultString(theServletContext.getContextPath());
// } else {
// servletContextPath = servletPath;
}
String servletContextPath = StringUtils.defaultString(theRequest.getContextPath());
String requestPath = requestFullPath.substring(servletContextPath.length() + servletPath.length());
if (requestPath.length() > 0 && requestPath.charAt(0) == '/') {
@ -105,4 +100,33 @@ public class IncomingRequestAddressStrategy implements IServerAddressStrategy {
myServletPath = theServletPath;
}
/**
* Determines the servlet's context path.
*
* This is here to try and deal with the wide variation in servers and what they return.
*
* getServletContext().getContextPath() is supposed to return the path to the specific servlet we are deployed as but it's not available everywhere. On some servers getServletContext() can return
* null (old Jetty seems to suffer from this, see hapi-fhir-base-test-mindeps-server) and on other old servers (Servlet 2.4) getServletContext().getContextPath() doesn't even exist.
*
* theRequest.getContextPath() returns the context for the specific incoming request. It should be available everywhere, but it's likely to be less predictable if there are multiple servlet mappings
* pointing to the same servlet, so we don't favour it. This is possibly not the best strategy (maybe we should just always use theRequest.getContextPath()?) but so far people seem happy with this
* behaviour across a wide variety of platforms.
*
* If you are having troubles on a given platform/configuration and want to suggest a change or even report incompatibility here, we'd love to hear about it.
*/
public static String determineServletContextPath(HttpServletRequest theRequest, RestfulServer server) {
	String retVal;
	if (server.getServletContext() != null) {
		/*
		 * ServletContext#getContextPath() only exists as of Servlet 2.5, so use it for
		 * 2.5+ containers and fall back to the request's context path for older ones.
		 * NOTE: the previous condition was "major >= 3 || (major > 2 && minor >= 5)",
		 * whose second clause could never be true (major > 2 already implies major >= 3),
		 * so genuine Servlet 2.5 containers incorrectly fell through to the request path.
		 */
		int major = server.getServletContext().getMajorVersion();
		int minor = server.getServletContext().getMinorVersion();
		if (major > 2 || (major == 2 && minor >= 5)) {
			retVal = server.getServletContext().getContextPath();
		} else {
			retVal = theRequest.getContextPath();
		}
	} else {
		// No servlet context available at all (seen on some old Jetty versions)
		retVal = theRequest.getContextPath();
	}
	// Normalize null to "" so callers can do arithmetic on the length safely
	retVal = StringUtils.defaultString(retVal);
	return retVal;
}
}

View File

@ -525,15 +525,11 @@ public class RestfulServer extends HttpServlet {
String requestFullPath = StringUtils.defaultString(theRequest.getRequestURI());
String servletPath = StringUtils.defaultString(theRequest.getServletPath());
StringBuffer requestUrl = theRequest.getRequestURL();
String servletContextPath = "";
// if (getServletContext().getMajorVersion() >= 3) {
// // getServletContext is only supported in version 3+ of servlet-api
if (getServletContext() != null) {
servletContextPath = StringUtils.defaultString(getServletContext().getContextPath());
}
// }
String servletContextPath = IncomingRequestAddressStrategy.determineServletContextPath(theRequest, this);
/*
* Just for debugging..
*/
if (ourLog.isTraceEnabled()) {
ourLog.trace("Request FullPath: {}", requestFullPath);
ourLog.trace("Servlet Path: {}", servletPath);
@ -752,6 +748,7 @@ public class RestfulServer extends HttpServlet {
}
}
/**
* Initializes the server. Note that this method is final to avoid accidentally introducing bugs in implementations,
* but subclasses may put initialization code in {@link #initialize()}, which is called immediately before beginning
@ -842,6 +839,7 @@ public class RestfulServer extends HttpServlet {
* (which extends {@link ServletException}), as this is a flag to the servlet container that the servlet
* is not usable.
*/
// NOTE(review): "unused" presumably suppresses warnings about the declared
// ServletException, which this no-op default never throws but overriding
// subclasses may — confirm the suppression is still needed.
@SuppressWarnings("unused")
protected void initialize() throws ServletException {
// nothing by default
}

View File

@ -59,7 +59,6 @@ import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.omg.PortableInterceptor.InterceptorOperations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.transaction.PlatformTransactionManager;
@ -86,6 +85,7 @@ import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.util.StopWatch;
@ -293,6 +293,71 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return new HashSet<Long>(q.getResultList());
}
/**
 * Narrows the given set of resource PIDs using the <code>_tag</code> search parameter.
 * <p>
 * Each inner list in {@code theList} is a set of OR'd tag tokens; the outer list is
 * AND semantics, implemented by running one criteria query per AND group and feeding
 * the surviving PIDs into the next iteration.
 *
 * @param thePids the PIDs matched so far; empty means "no restriction yet"
 * @param theList the _tag parameter values (AND list of OR lists), may be null
 * @return the PIDs that also match the tag criteria
 * @throws InvalidRequestException if a token supplies a system but no code
 */
private Set<Long> addPredicateTag(Set<Long> thePids, List<List<? extends IQueryParameterType>> theList) {
	Set<Long> pids = thePids;
	if (theList == null || theList.isEmpty()) {
		return pids;
	}

	for (List<? extends IQueryParameterType> nextAndParams : theList) {
		// Validate the group up front: a system with no code is an error, and a
		// group with no usable codes is silently skipped (matches original behavior).
		boolean haveTags = false;
		for (IQueryParameterType nextParamUncasted : nextAndParams) {
			TokenParam nextParam = (TokenParam) nextParamUncasted;
			if (isNotBlank(nextParam.getValue())) {
				haveTags = true;
			} else if (isNotBlank(nextParam.getSystem())) {
				throw new InvalidRequestException("Invalid _tag parameter (must supply a value/code and not just a system): " + nextParam.getValueAsQueryToken());
			}
		}
		if (!haveTags) {
			continue;
		}

		CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
		CriteriaQuery<Long> cq = builder.createQuery(Long.class);
		Root<ResourceTag> from = cq.from(ResourceTag.class);
		cq.select(from.get("myResourceId").as(Long.class));

		List<Predicate> andPredicates = new ArrayList<Predicate>();
		andPredicates.add(builder.equal(from.get("myResourceType"), myResourceName));

		List<Predicate> orPredicates = new ArrayList<Predicate>();
		for (IQueryParameterType nextOrParams : nextAndParams) {
			TokenParam nextParam = (TokenParam) nextOrParams;
			if (isBlank(nextParam.getValue())) {
				// Skip BEFORE creating the join: the original joined "myTag" even for
				// params it then discarded, adding a useless join to the query.
				continue;
			}
			From<ResourceTag, TagDefinition> defJoin = from.join("myTag");
			Predicate codePredicate = builder.equal(defJoin.get("myCode"), nextParam.getValue());
			if (isNotBlank(nextParam.getSystem())) {
				Predicate systemPredicate = builder.equal(defJoin.get("mySystem"), nextParam.getSystem());
				orPredicates.add(builder.and(systemPredicate, codePredicate));
			} else {
				orPredicates.add(codePredicate);
			}
		}
		if (orPredicates.isEmpty() == false) {
			andPredicates.add(builder.or(orPredicates.toArray(new Predicate[0])));
		}

		Predicate masterCodePredicate = builder.and(andPredicates.toArray(new Predicate[0]));

		// AND semantics: restrict this query to PIDs matched by previous groups
		if (pids.size() > 0) {
			Predicate inPids = (from.get("myResourceId").in(pids));
			cq.where(builder.and(masterCodePredicate, inPids));
		} else {
			cq.where(masterCodePredicate);
		}

		TypedQuery<Long> q = myEntityManager.createQuery(cq);
		pids = new HashSet<Long>(q.getResultList());
	}

	return pids;
}
private boolean addPredicateMissingFalseIfPresent(CriteriaBuilder theBuilder, String theParamName, Root<? extends BaseResourceIndexedSearchParam> from, List<Predicate> codePredicates,
IQueryParameterType nextOr) {
boolean missingFalse = false;
@ -635,11 +700,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
RuntimeResourceDefinition resDef = getContext().getResourceDefinition(ref.getResourceType());
resourceTypes.add(resDef.getImplementingClass());
}
boolean foundChainMatch = false;
for (Class<? extends IBaseResource> nextType : resourceTypes) {
RuntimeResourceDefinition typeDef = getContext().getResourceDefinition(nextType);
String chain = ref.getChain();
String remainingChain = null;
int chainDotIndex = chain.indexOf('.');
@ -658,23 +723,23 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
ourLog.debug("Don't have a DAO for type {}", nextType.getSimpleName(), param);
continue;
}
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] { nextType.getSimpleName(), chain, remainingChain });
continue;
}
chainValue = new ReferenceParam();
chainValue.setValueAsQueryToken(null, resourceId);
((ReferenceParam)chainValue).setChain(remainingChain);
((ReferenceParam) chainValue).setChain(remainingChain);
} else {
chainValue = toParameterType(param, resourceId);
}
foundChainMatch = true;
Set<Long> pids = dao.searchForIds(chain, chainValue);
if (pids.isEmpty()) {
continue;
@ -684,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
codePredicates.add(eq);
}
if (!foundChainMatch) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + ref.getChain()));
}
@ -894,7 +959,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
/**
 * Builds an OperationOutcome carrying the given message at "error" severity.
 */
protected IBaseOperationOutcome createErrorOperationOutcome(String theMessage) {
return createOperationOutcome(IssueSeverityEnum.ERROR.getCode(), theMessage);
}

/**
 * Builds an OperationOutcome carrying the given message at "information" severity.
 */
protected IBaseOperationOutcome createInfoOperationOutcome(String theMessage) {
return createOperationOutcome(IssueSeverityEnum.INFORMATION.getCode(), theMessage);
}
@ -1017,12 +1082,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
}
if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
throw new InvalidRequestException("Parameter[" + theParamName + "] has system (" + system.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH
+ "): " + system);
throw new InvalidRequestException(
"Parameter[" + theParamName + "] has system (" + system.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + system);
}
if (code != null && code.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
throw new InvalidRequestException("Parameter[" + theParamName + "] has code (" + code.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH
+ "): " + code);
throw new InvalidRequestException(
"Parameter[" + theParamName + "] has code (" + code.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + code);
}
ArrayList<Predicate> singleCodePredicates = (new ArrayList<Predicate>());
@ -1096,13 +1161,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
}
From<?, ?> stringJoin = theFrom.join(joinAttrName, JoinType.INNER);
if (param.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
thePredicates.add(stringJoin.get("mySourcePath").as(String.class).in(param.getPathsSplit()));
} else {
thePredicates.add(theBuilder.equal(stringJoin.get("myParamName"), theSort.getParamName()));
}
// Predicate p = theBuilder.equal(stringJoin.get("myParamName"), theSort.getParamName());
// Predicate pn = theBuilder.isNull(stringJoin.get("myParamName"));
// thePredicates.add(theBuilder.or(p, pn));
@ -1197,7 +1262,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
DaoMethodOutcome outcome = toMethodOutcome(entity, theResource).setCreated(true);
notifyWriteCompleted();
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
@ -1594,8 +1659,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
if (entity == null) {
if (theId.hasVersionIdPart()) {
TypedQuery<ResourceHistoryTable> q = myEntityManager.createQuery(
"SELECT t from ResourceHistoryTable t WHERE t.myResourceId = :RID AND t.myResourceType = :RTYP AND t.myResourceVersion = :RVER", ResourceHistoryTable.class);
TypedQuery<ResourceHistoryTable> q = myEntityManager
.createQuery("SELECT t from ResourceHistoryTable t WHERE t.myResourceId = :RID AND t.myResourceType = :RTYP AND t.myResourceVersion = :RVER", ResourceHistoryTable.class);
q.setParameter("RID", pid);
q.setParameter("RTYP", myResourceName);
q.setParameter("RVER", Long.parseLong(theId.getVersionIdPart()));
@ -1930,6 +1995,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
pids = addPredicateLanguage(pids, nextParamEntry.getValue());
} else if (nextParamName.equals("_tag")) {
pids = addPredicateTag(pids, nextParamEntry.getValue());
} else {
RuntimeSearchParam nextParamDef = resourceDef.getSearchParam(nextParamName);
@ -2136,7 +2205,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
ResourceTable savedEntity = updateEntity(theResource, entity, true, null, thePerformIndexing, true);
notifyWriteCompleted();
DaoMethodOutcome outcome = toMethodOutcome(savedEntity, theResource).setCreated(false);
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
@ -2160,8 +2229,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
private void validateResourceType(BaseHasResource entity) {
if (!myResourceName.equals(entity.getResourceType())) {
throw new ResourceNotFoundException("Resource with ID " + entity.getIdDt().getIdPart() + " exists but it is not of type " + myResourceName + ", found resource of type "
+ entity.getResourceType());
throw new ResourceNotFoundException(
"Resource with ID " + entity.getIdDt().getIdPart() + " exists but it is not of type " + myResourceName + ", found resource of type " + entity.getResourceType());
}
}

View File

@ -44,12 +44,12 @@ public class ResourceTag extends BaseTag {
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID")
private ResourceTable myResource;
@Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN,nullable=false)
@Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false)
private String myResourceType;
@Column(name="RES_ID", insertable=false,updatable=false)
@Column(name = "RES_ID", insertable = false, updatable = false)
private Long myResourceId;
public Long getResourceId() {
return myResourceId;
}

View File

@ -39,9 +39,7 @@ import ca.uhn.fhir.model.api.Tag;
//@formatter:on
@Entity
@Table(name = "HFJ_TAG_DEF", uniqueConstraints = {
@UniqueConstraint(columnNames = { "TAG_TYPE", "TAG_SYSTEM", "TAG_CODE" })
})
@Table(name = "HFJ_TAG_DEF", uniqueConstraints = { @UniqueConstraint(columnNames = { "TAG_TYPE", "TAG_SYSTEM", "TAG_CODE" }) })
//@formatter:off
public class TagDefinition implements Serializable {
@ -89,6 +87,10 @@ public class TagDefinition implements Serializable {
return myDisplay;
}
/**
 * Returns the database-assigned primary key of this tag definition.
 */
public Long getId() {
return myId;
}

/**
 * Returns this tag's system (the code-system identifier part of the tag).
 */
public String getSystem() {
return mySystem;
}

View File

@ -87,6 +87,7 @@ import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.IBundleProvider;
@ -1873,6 +1874,80 @@ public class FhirResourceDaoDstu2Test extends BaseJpaTest {
}
@Test
public void testSearchWithTagParameter() {
// Verifies the JPA DAO's handling of the _tag search parameter:
// single token, code-only token, OR list, and AND list semantics.
String methodName = "testSearchWithTagParameter";
// Organization #1 carries tags "...1a" and "...1b"
IIdType tag1id;
{
Organization org = new Organization();
org.getNameElement().setValue("FOO");
TagList tagList = new TagList();
tagList.addTag("urn:taglist", methodName + "1a");
tagList.addTag("urn:taglist", methodName + "1b");
ResourceMetadataKeyEnum.TAG_LIST.put(org, tagList);
tag1id = ourOrganizationDao.create(org).getId().toUnqualifiedVersionless();
}
// Organization #2 carries tags "...2a" and "...2b"
IIdType tag2id;
{
Organization org = new Organization();
org.getNameElement().setValue("FOO");
TagList tagList = new TagList();
tagList.addTag("urn:taglist", methodName + "2a");
tagList.addTag("urn:taglist", methodName + "2b");
ResourceMetadataKeyEnum.TAG_LIST.put(org, tagList);
tag2id = ourOrganizationDao.create(org).getId().toUnqualifiedVersionless();
}
{
// One tag: system|code matches only the resource tagged "1a"
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam("urn:taglist", methodName + "1a"));
List<IIdType> patients = toUnqualifiedVersionlessIds(ourOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
{
// Code only: null system should match the tag regardless of its system
SearchParameterMap params = new SearchParameterMap();
params.add("_tag", new TokenParam(null, methodName + "1a"));
List<IIdType> patients = toUnqualifiedVersionlessIds(ourOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
{
// Or tags: "1a" OR "2a" matches both organizations
SearchParameterMap params = new SearchParameterMap();
TokenOrListParam orListParam = new TokenOrListParam();
orListParam.add(new TokenParam("urn:taglist", methodName + "1a"));
orListParam.add(new TokenParam("urn:taglist", methodName + "2a"));
params.add("_tag", orListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(ourOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id, tag2id));
}
// TODO: get multiple/AND working
{
// And tags: "1a" AND "2a" are on different resources, so nothing matches
SearchParameterMap params = new SearchParameterMap();
TokenAndListParam andListParam = new TokenAndListParam();
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1a"));
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "2a"));
params.add("_tag", andListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(ourOrganizationDao.search(params));
assertEquals(0, patients.size());
}
{
// And tags: "1a" AND "1b" are both on organization #1, which should match
SearchParameterMap params = new SearchParameterMap();
TokenAndListParam andListParam = new TokenAndListParam();
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1a"));
andListParam.addValue(new TokenOrListParam("urn:taglist", methodName + "1b"));
params.add("_tag", andListParam);
List<IIdType> patients = toUnqualifiedVersionlessIds(ourOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id));
}
}
@Test
public void testSearchWithIncludes() {
IIdType parentOrgId;

View File

@ -693,6 +693,34 @@ public class GenericClientTest {
}
@SuppressWarnings("unused")
@Test
public void testSearchByTag() throws Exception {
	// Stub the HTTP layer with a one-result Patient bundle and capture the outgoing request
	String responseBody = getPatientFeedWithOneResult();

	ArgumentCaptor<HttpUriRequest> requestCaptor = ArgumentCaptor.forClass(HttpUriRequest.class);
	when(myHttpClient.execute(requestCaptor.capture())).thenReturn(myHttpResponse);
	when(myHttpResponse.getStatusLine()).thenReturn(new BasicStatusLine(new ProtocolVersion("HTTP", 1, 1), 200, "OK"));
	when(myHttpResponse.getEntity().getContentType()).thenReturn(new BasicHeader("content-type", Constants.CT_FHIR_XML + "; charset=UTF-8"));
	when(myHttpResponse.getEntity().getContent()).thenReturn(new ReaderInputStream(new StringReader(responseBody), Charset.forName("UTF-8")));

	IGenericClient genericClient = ourCtx.newRestfulGenericClient("http://example.com/fhir");

	// Each withTag() criterion should be serialized as its own _tag URL parameter
	//@formatter:off
	Bundle ignored = genericClient.search()
			.forResource(Patient.class)
			.withTag("urn:foo", "123")
			.withTag("urn:bar", "456")
			.execute();
	//@formatter:on

	assertEquals("http://example.com/fhir/Patient?_tag=urn%3Afoo%7C123&_tag=urn%3Abar%7C456", requestCaptor.getValue().getURI().toString());
}
@SuppressWarnings("unused")
@Test
public void testSearchWithReverseInclude() throws Exception {

View File

@ -220,6 +220,8 @@ public class IncomingRequestAddressStrategyTest {
private static class MyServlet extends HttpServlet {
private static final long serialVersionUID = -8903322104434705422L;
@Override
protected void doGet(HttpServletRequest theReq, HttpServletResponse theResp) throws ServletException, IOException {

View File

@ -0,0 +1,188 @@
package ca.uhn.fhir.rest.server;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.servlet.ServletException;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import ca.uhn.fhir.model.dstu.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Read;
import ca.uhn.fhir.util.RandomServerPortProvider;
public class ServletContextParsingTest {
private static CloseableHttpClient ourClient;
private static IdDt ourLastId;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServletContextParsingTest.class);
private Server myServer;
public void after() throws Exception {
if (myServer != null) {
myServer.stop();
}
}
@Before
public void before() {
ourLastId = null;
}
private void httpGet(String url) throws IOException, ClientProtocolException {
ourLastId = null;
HttpGet httpget = new HttpGet(url);
HttpResponse status = ourClient.execute(httpget);
String responseContent = IOUtils.toString(status.getEntity().getContent());
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
}
private void startServer(int port, String contextPath, String servletPath) throws Exception {
myServer = new Server(port);
org.eclipse.jetty.servlet.ServletContextHandler proxyHandler = new org.eclipse.jetty.servlet.ServletContextHandler();
proxyHandler.setContextPath(contextPath);
ServletHolder handler = new ServletHolder();
handler.setServlet(new MyServlet());
proxyHandler.addServlet(handler, servletPath);
myServer.setHandler(proxyHandler);
myServer.start();
}
@Test
public void testUnderJettyWithContextPathServletRoot() throws Exception {
int port = RandomServerPortProvider.findFreePort();
String contextPath = "/ctx";
String servletPath = "/*";
startServer(port, contextPath, servletPath);
httpGet("http://localhost:" + port + "/ctx/Patient/123/_history/234?_pretty=true");
assertEquals("Patient/123/_history/234", ourLastId.getValue());
}
@Test
public void testUnderJettyWithContextPathServletRoot2() throws Exception {
int port = RandomServerPortProvider.findFreePort();
String contextPath = "/ctx";
String servletPath = "/foo/bar/*"; // not /* but still this should work
startServer(port, contextPath, servletPath);
httpGet("http://localhost:" + port + "/ctx/foo/bar/Patient/123/_history/222");
assertEquals("Patient/123/_history/222", ourLastId.getValue());
}
@Test
public void testUnderJettyWithContextPathServletPath() throws Exception {
int port = RandomServerPortProvider.findFreePort();
String contextPath = "/ctx";
String servletPath = "/servlet/*";
startServer(port, contextPath, servletPath);
httpGet("http://localhost:" + port + "/ctx/servlet/Patient/123/_history/222");
assertEquals("Patient/123/_history/222", ourLastId.getValue());
}
@Test
public void testUnderJettyWithMultiplePaths() throws Exception {
int port = RandomServerPortProvider.findFreePort();
myServer = new Server(port);
org.eclipse.jetty.servlet.ServletContextHandler proxyHandler = new org.eclipse.jetty.servlet.ServletContextHandler();
proxyHandler.setContextPath("/ctx");
proxyHandler.addServlet(new ServletHolder(new MyServlet()), "/servlet/*");
proxyHandler.addServlet(new ServletHolder(new MyServlet()), "/foo/bar/*");
myServer.setHandler(proxyHandler);
myServer.start();
httpGet("http://localhost:" + port + "/ctx/servlet/Patient/123/_history/222");
assertEquals("Patient/123/_history/222", ourLastId.getValue());
httpGet("http://localhost:" + port + "/ctx/foo/bar/Patient/123/_history/222");
assertEquals("Patient/123/_history/222", ourLastId.getValue());
}
@Test
public void testUnderJettyWithContextRootServletRoot() throws Exception {
int port = RandomServerPortProvider.findFreePort();
String contextPath = "/";
String servletPath = "/*";
startServer(port, contextPath, servletPath);
httpGet("http://localhost:" + port + "/Patient/123/_history/222");
assertEquals("Patient/123/_history/222", ourLastId.getValue());
}
@BeforeClass
public static void beforeClass() {
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager);
ourClient = builder.build();
}
private static class MyServlet extends RestfulServer {
private static final long serialVersionUID = -8903322104434705422L;
@Override
protected void initialize() throws ServletException {
setResourceProviders(new MyPatientProvider());
}
}
public static class MyPatientProvider implements IResourceProvider {
@Override
public Class<? extends IBaseResource> getResourceType() {
return Patient.class;
}
@Read(version=true)
public Patient read(@IdParam IdDt theId) {
ourLastId = theId;
Patient retVal = new Patient();
retVal.setId(theId);
return retVal;
}
}
}

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.validation;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -10,9 +12,11 @@ import org.hl7.fhir.instance.model.QuestionnaireAnswers;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.valuesets.IssueType;
import org.hl7.fhir.instance.utils.WorkerContext;
import org.hl7.fhir.instance.validation.QuestionnaireAnswersValidator;
import org.hl7.fhir.instance.validation.ValidationMessage;
import org.hl7.fhir.instance.validation.ValidationMessage.Source;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.parser.IParser;
@ -87,7 +91,9 @@ public class FhirQuestionnaireAnswersValidator extends BaseValidatorBridge {
for (ResourceReferenceInfo nextRefInfo : refs) {
IIdType nextRef = nextRefInfo.getResourceReference().getReferenceElement();
String resourceType = nextRef.getResourceType();
if ("ValueSet".equals(resourceType)) {
if (isBlank(resourceType)) {
theMessages.add(new ValidationMessage(Source.QuestionnaireAnswersValidator, IssueType.INVALID, null, "Invalid reference '" + nextRef.getValue() + "' - Does not identify resource type", IssueSeverity.FATAL));
} else if ("ValueSet".equals(resourceType)) {
if (!theWorkerCtx.getValueSets().containsKey(nextRef.getValue())) {
ValueSet resource = tryToLoad(ValueSet.class, nextRef, theMessages);
if (resource == null) {

View File

@ -37,12 +37,31 @@ import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.instance.model.annotations.Child;
import org.hl7.fhir.instance.model.annotations.Description;
import org.hl7.fhir.instance.model.annotations.DatatypeDef;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.annotations.Block;
import org.hl7.fhir.instance.model.api.*;
/**
* Base definition for all elements in a resource.
*/
public abstract class Element extends Base implements IBaseHasExtensions {
/**
 * Returns an unmodifiable list of every extension on this element whose
 * URL exactly matches the given URL.
 *
 * @param theUrl The extension URL to match. Must not be blank or null.
 * @return an unmodifiable (possibly empty) list of the matching extensions
 */
public List<Extension> getExtensionsByUrl(String theUrl) {
    Validate.notBlank(theUrl, "theUrl must not be blank or null");
    List<Extension> matches = new ArrayList<Extension>();
    for (Extension candidate : getExtension()) {
        if (theUrl.equals(candidate.getUrl())) {
            matches.add(candidate);
        }
    }
    return Collections.unmodifiableList(matches);
}
/**
* unique id for the element within a resource (for internal references).

View File

@ -14,6 +14,7 @@ import org.hl7.fhir.instance.client.ResourceFormat;
import org.hl7.fhir.instance.model.Bundle;
import org.hl7.fhir.instance.model.ConceptMap;
import org.hl7.fhir.instance.model.Conformance;
import org.hl7.fhir.instance.model.DataElement;
import org.hl7.fhir.instance.model.ElementDefinition.TypeRefComponent;
import org.hl7.fhir.instance.model.OperationOutcome;
import org.hl7.fhir.instance.model.Parameters;
@ -52,6 +53,7 @@ public class WorkerContext implements NameResolver {
private ITerminologyServices terminologyServices = new NullTerminologyServices();
private IFHIRClient client = new NullClient();
private Map<String, ValueSet> codeSystems = new HashMap<String, ValueSet>();
private Map<String, DataElement> dataElements = new HashMap<String, DataElement>();
private Map<String, ValueSet> valueSets = new HashMap<String, ValueSet>();
private Map<String, ConceptMap> maps = new HashMap<String, ConceptMap>();
private Map<String, StructureDefinition> profiles = new HashMap<String, StructureDefinition>();
@ -97,6 +99,10 @@ public class WorkerContext implements NameResolver {
return codeSystems;
}
/**
 * Returns the registry of known {@link DataElement} resources. The internal
 * map is returned directly, so callers may populate it in place. Keys are
 * presumably reference strings (e.g. "DataElement/123") — TODO confirm against callers.
 */
public Map<String, DataElement> getDataElements() {
return dataElements;
}
public Map<String, ValueSet> getValueSets() {
return valueSets;
}

View File

@ -64,7 +64,7 @@ public class BaseValidator {
*/
protected boolean fail(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String msg) {
if (!thePass) {
String path = StringUtils.join(pathParts, '.');
String path = toPath(pathParts);
errors.add(new ValidationMessage(source, type, -1, -1, path, msg, IssueSeverity.FATAL));
}
return thePass;
@ -79,7 +79,7 @@ public class BaseValidator {
*/
protected boolean fail(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String theMessage, Object... theMessageArguments) {
if (!thePass) {
String path = StringUtils.join(pathParts, '.');
String path = toPath(pathParts);
errors.add(new ValidationMessage(source, type, -1, -1, path, formatMessage(theMessage, theMessageArguments), IssueSeverity.FATAL));
}
return thePass;
@ -151,7 +151,7 @@ public class BaseValidator {
*/
protected boolean hint(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String theMessage, Object... theMessageArguments) {
if (!thePass) {
String path = StringUtils.join(pathParts, '.');
String path = toPath(pathParts);
String message = formatMessage(theMessage, theMessageArguments);
errors.add(new ValidationMessage(source, type, -1, -1, path, message, IssueSeverity.INFORMATION));
}
@ -181,7 +181,7 @@ public class BaseValidator {
*/
protected boolean rule(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String msg) {
if (!thePass) {
String path = StringUtils.join(pathParts, '.');
String path = toPath(pathParts);
errors.add(new ValidationMessage(source, type, -1, -1, path, msg, IssueSeverity.ERROR));
}
return thePass;
@ -196,13 +196,20 @@ public class BaseValidator {
*/
protected boolean rule(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String theMessage, Object... theMessageArguments) {
if (!thePass) {
String path = StringUtils.join(pathParts, '.');
String path = toPath(pathParts);
String message = formatMessage(theMessage, theMessageArguments);
errors.add(new ValidationMessage(source, type, -1, -1, path, message, IssueSeverity.ERROR));
}
return thePass;
}
/**
 * Renders a stack of path segments as a location string of the form
 * "//part1/part2". A null or empty list yields an empty string.
 */
private String toPath(List<String> pathParts) {
    if (pathParts != null && !pathParts.isEmpty()) {
        return "//" + StringUtils.join(pathParts, '/');
    }
    return "";
}
/**
* Test a rule and add a {@link IssueSeverity#ERROR} validation message if the validation fails
*
@ -295,4 +302,20 @@ public class BaseValidator {
return thePass;
}
/**
 * Evaluates a rule and records a {@link IssueSeverity#WARNING} validation
 * message at the given path when the rule fails.
 *
 * @param thePass
 *            Set this parameter to <code>false</code> if the validation does not pass
 * @return Returns <code>thePass</code> (in other words, returns <code>true</code> if the rule did not fail validation)
 */
protected boolean warning(List<ValidationMessage> errors, IssueType type, List<String> pathParts, boolean thePass, String theMessage, Object... theMessageArguments) {
    if (thePass) {
        return true;
    }
    String location = toPath(pathParts);
    String text = formatMessage(theMessage, theMessageArguments);
    errors.add(new ValidationMessage(source, type, -1, -1, location, text, IssueSeverity.WARNING));
    return false;
}
}

View File

@ -10,15 +10,20 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.Attachment;
import org.hl7.fhir.instance.model.BooleanType;
import org.hl7.fhir.instance.model.Coding;
import org.hl7.fhir.instance.model.DataElement;
import org.hl7.fhir.instance.model.DateTimeType;
import org.hl7.fhir.instance.model.DateType;
import org.hl7.fhir.instance.model.DecimalType;
import org.hl7.fhir.instance.model.ElementDefinition;
import org.hl7.fhir.instance.model.Extension;
import org.hl7.fhir.instance.model.InstantType;
import org.hl7.fhir.instance.model.IntegerType;
import org.hl7.fhir.instance.model.OperationOutcome.IssueSeverity;
import org.hl7.fhir.instance.model.Quantity;
import org.hl7.fhir.instance.model.Questionnaire;
import org.hl7.fhir.instance.model.Questionnaire.AnswerFormat;
@ -35,6 +40,8 @@ import org.hl7.fhir.instance.model.Type;
import org.hl7.fhir.instance.model.UriType;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.ValueSet.ConceptDefinitionComponent;
import org.hl7.fhir.instance.model.ValueSet.ConceptReferenceComponent;
import org.hl7.fhir.instance.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.instance.model.valuesets.IssueType;
import org.hl7.fhir.instance.utils.WorkerContext;
@ -45,13 +52,14 @@ import org.hl7.fhir.instance.utils.WorkerContext;
*/
public class QuestionnaireAnswersValidator extends BaseValidator {
/* *****************************************************************
* Note to anyone working on this class -
/*
* ***************************************************************** Note to anyone working on this class -
*
* This class has unit tests which run within the HAPI project build. Please sync any changes here to HAPI and ensure that unit tests are run.
* ****************************************************************
*/
private static final List<String> EMPTY_PATH = Collections.emptyList();
private WorkerContext myWorkerCtx;
public QuestionnaireAnswersValidator(WorkerContext theWorkerCtx) {
@ -78,11 +86,13 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
}
private List<org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent> findGroupByLinkId(List<org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent> theGroups, String theLinkId) {
Validate.notBlank(theLinkId, "theLinkId must not be blank");
ArrayList<org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent> retVal = new ArrayList<QuestionnaireAnswers.GroupComponent>();
for (org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent next : theGroups) {
if (theLinkId.equals(next.getLinkId())) {
if (theLinkId == null) {
if (next.getLinkId() == null) {
retVal.add(next);
}
} else if (theLinkId.equals(next.getLinkId())) {
retVal.add(next);
}
}
@ -100,6 +110,9 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
Reference questionnaireRef = theAnswers.getQuestionnaire();
Questionnaire questionnaire = getQuestionnaire(theAnswers, questionnaireRef);
if (questionnaire == null && theErrors.size() > 0 && theErrors.get(theErrors.size() - 1).getLevel() == IssueSeverity.FATAL) {
return;
}
if (!fail(theErrors, IssueType.INVALID, pathStack, questionnaire != null, "Questionnaire {0} is not found in the WorkerContext", theAnswers.getQuestionnaire().getReference())) {
return;
}
@ -111,7 +124,7 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
}
pathStack.removeLast();
pathStack.add("group(0)");
pathStack.add("group[0]");
validateGroup(theErrors, questionnaire.getGroup(), theAnswers.getGroup(), pathStack, theAnswers, validateRequired);
}
@ -169,7 +182,7 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
// Check that there are no extra answers
for (int i = 0; i < theAnsGroup.getQuestion().size(); i++) {
org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionComponent nextQuestion = theAnsGroup.getQuestion().get(i);
thePathStack.add("question(" + i + ")");
thePathStack.add("question[" + i + "]");
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, allowedQuestions.contains(nextQuestion.getLinkId()), "Found answer with linkId[{0}] but this ID is not allowed at this position",
nextQuestion.getLinkId());
thePathStack.remove();
@ -181,43 +194,69 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
private void validateQuestion(List<ValidationMessage> theErrors, QuestionComponent theQuestion, org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent theAnsGroup,
LinkedList<String> thePathStack, QuestionnaireAnswers theAnswers, boolean theValidateRequired) {
String linkId = theQuestion.getLinkId();
QuestionComponent question = theQuestion;
String linkId = question.getLinkId();
if (!fail(theErrors, IssueType.INVALID, thePathStack, isNotBlank(linkId), "Questionnaire is invalid, question found with no link ID")) {
return;
}
AnswerFormat type = theQuestion.getType();
AnswerFormat type = question.getType();
if (type == null) {
if (theQuestion.getGroup().isEmpty()) {
rule(theErrors, IssueType.INVALID, thePathStack, false, "Questionnaire in invalid, no type and no groups specified for question with link ID[{0}]", linkId);
return;
// Support old format/casing and new
List<Extension> extensions = question.getExtensionsByUrl("http://hl7.org/fhir/StructureDefinition/questionnaire-deReference");
if (extensions.isEmpty()) {
extensions = question.getExtensionsByUrl("http://hl7.org/fhir/StructureDefinition/questionnaire-dereference");
}
if (extensions.isEmpty() == false) {
if (extensions.size() > 1) {
warning(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Questionnaire is invalid, element contains multiple extensions with URL 'questionnaire-dereference', maximum one may be contained in a single element");
}
return;
/*
* Hopefully we will implement this soon...
*/
// Extension ext = extensions.get(0);
// Reference ref = (Reference) ext.getValue();
// DataElement de = myWorkerCtx.getDataElements().get(ref.getReference());
// if (de.getElement().size() != 1) {
// warning(theErrors, IssueType.BUSINESSRULE, EMPTY_PATH, false, "DataElement {0} has wrong number of elements: {1}", ref.getReference(), de.getElement().size());
// }
// ElementDefinition element = de.getElement().get(0);
// question = toQuestion(element);
} else {
if (question.getGroup().isEmpty()) {
rule(theErrors, IssueType.INVALID, thePathStack, false, "Questionnaire is invalid, no type and no groups specified for question with link ID[{0}]", linkId);
return;
}
type = AnswerFormat.NULL;
}
type = AnswerFormat.NULL;
}
List<org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionComponent> answers = findAnswersByLinkId(theAnsGroup.getQuestion(), linkId);
if (answers.size() > 1) {
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, !theQuestion.getRequired(), "Multiple answers repetitions found with linkId[{0}]", linkId);
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, !question.getRequired(), "Multiple answers repetitions found with linkId[{0}]", linkId);
}
if (answers.size() == 0) {
if (theValidateRequired) {
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, !theQuestion.getRequired(), "Missing answer to required question with linkId[{0}]", linkId);
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, !question.getRequired(), "Missing answer to required question with linkId[{0}]", linkId);
} else {
hint(theErrors, IssueType.BUSINESSRULE, thePathStack, !theQuestion.getRequired(), "Missing answer to required question with linkId[{0}]", linkId);
hint(theErrors, IssueType.BUSINESSRULE, thePathStack, !question.getRequired(), "Missing answer to required question with linkId[{0}]", linkId);
}
return;
}
org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionComponent answerQuestion = answers.get(0);
try {
thePathStack.add("question(" + answers.indexOf(answerQuestion) + ")");
validateQuestionAnswers(theErrors, theQuestion, thePathStack, type, answerQuestion, theAnswers, theValidateRequired);
validateQuestionGroups(theErrors, theQuestion, answerQuestion, thePathStack, theAnswers, theValidateRequired);
thePathStack.add("question[" + answers.indexOf(answerQuestion) + "]");
validateQuestionAnswers(theErrors, question, thePathStack, type, answerQuestion, theAnswers, theValidateRequired);
validateQuestionGroups(theErrors, question, answerQuestion, thePathStack, theAnswers, theValidateRequired);
} finally {
thePathStack.removeLast();
}
}
private void validateQuestionGroups(List<ValidationMessage> theErrors, QuestionComponent theQuestion, org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionComponent theAnswerQuestion,
LinkedList<String> thePathSpec, QuestionnaireAnswers theAnswers, boolean theValidateRequired) {
validateGroups(theErrors, theQuestion.getGroup(), theAnswerQuestion.getGroup(), thePathSpec, theAnswers, theValidateRequired);
@ -230,6 +269,14 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
private void validateGroups(List<ValidationMessage> theErrors, List<GroupComponent> theQuestionGroups, List<org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent> theAnswerGroups,
LinkedList<String> thePathStack, QuestionnaireAnswers theAnswers, boolean theValidateRequired) {
Set<String> linkIds = new HashSet<String>();
for (GroupComponent nextQuestionGroup : theQuestionGroups) {
String nextLinkId = StringUtils.defaultString(nextQuestionGroup.getLinkId());
if (!linkIds.add(nextLinkId)) {
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Questionnaire in invalid, unable to validate QuestionnaireAnswers: Multiple groups found at this position with linkId[{0}]", nextLinkId);
}
}
Set<String> allowedGroups = new HashSet<String>();
for (GroupComponent nextQuestionGroup : theQuestionGroups) {
String linkId = nextQuestionGroup.getLinkId();
@ -249,14 +296,14 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
if (answerGroups.size() > 1) {
if (nextQuestionGroup.getRepeats() == false) {
int index = theAnswerGroups.indexOf(answerGroups.get(1));
thePathStack.add("group(" + index + ")");
thePathStack.add("group[" + index + "]");
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Multiple repetitions of group with linkId[{0}] found at this position, but this group can not repeat", linkId);
thePathStack.removeLast();
}
}
for (org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent nextAnswerGroup : answerGroups) {
int index = theAnswerGroups.indexOf(answerGroups.get(1));
thePathStack.add("group(" + index + ")");
int index = theAnswerGroups.indexOf(nextAnswerGroup);
thePathStack.add("group[" + index + "]");
validateGroup(theErrors, nextQuestionGroup, nextAnswerGroup, thePathStack, theAnswers, theValidateRequired);
thePathStack.removeLast();
}
@ -267,7 +314,7 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
for (org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent next : theAnswerGroups) {
idx++;
if (!allowedGroups.contains(next.getLinkId())) {
thePathStack.add("group(" + idx + ")");
thePathStack.add("group[" + idx + "]");
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Group with linkId[{0}] found at this position, but this group does not exist at this position in Questionnaire",
next.getLinkId());
thePathStack.removeLast();
@ -296,11 +343,11 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
for (QuestionAnswerComponent nextAnswer : answerQuestion.getAnswer()) {
answerIdx++;
try {
thePathStack.add("answer(" + answerIdx + ")");
thePathStack.add("answer[" + answerIdx + "]");
Type nextValue = nextAnswer.getValue();
if (!allowedAnswerTypes.contains(nextValue.getClass())) {
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Answer to question with linkId[{0}] found of type [{1}] but this is invalid for question of type [{2}]", linkId, nextValue
.getClass().getSimpleName(), type.toCode());
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, false, "Answer to question with linkId[{0}] found of type [{1}] but this is invalid for question of type [{2}]", linkId,
nextValue.getClass().getSimpleName(), type.toCode());
continue;
}
@ -340,6 +387,21 @@ public class QuestionnaireAnswersValidator extends BaseValidator {
}
}
}
if (!found) {
for (ConceptSetComponent nextCompose : valueSet.getCompose().getInclude()) {
if (coding.getSystem().equals(nextCompose.getSystem())) {
for (ConceptReferenceComponent next : nextCompose.getConcept()) {
if (coding.getCode().equals(next.getCode())) {
found = true;
break;
}
}
}
if (found) {
break;
}
}
}
rule(theErrors, IssueType.BUSINESSRULE, thePathStack, found, "Question with linkId[{0}] has answer with system[{1}] and code[{2}] but this is not a valid answer for ValueSet[{3}]",
linkId, coding.getSystem(), coding.getCode(), optionsRef);

View File

@ -52,7 +52,8 @@ public class ValidationMessage {
Schematron,
Publisher,
Ontology,
ProfileComparer
ProfileComparer,
QuestionnaireAnswersValidator
}
//@formatter:on

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.validation;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.*;
@ -7,6 +8,8 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
@ -20,6 +23,7 @@ import org.hl7.fhir.instance.model.QuestionnaireAnswers;
import org.hl7.fhir.instance.model.Reference;
import org.hl7.fhir.instance.model.StringType;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.Questionnaire.AnswerFormat;
import org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionnaireAnswersStatus;
import org.hl7.fhir.instance.utils.WorkerContext;
@ -28,6 +32,8 @@ import org.hl7.fhir.instance.validation.ValidationMessage;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.IntegerDt;
@ -122,7 +128,7 @@ public class QuestionnaireAnswersValidatorIntegrationTest {
ourLog.info(result.getMessages().toString());
assertThat(result.getMessages().toString(), containsString("Missing answer to required question with linkId[link0],mySeverity=error"));
}
}
}
@Test
public void testCodedAnswer() {
@ -153,13 +159,22 @@ public class QuestionnaireAnswersValidatorIntegrationTest {
qa.getGroup().addQuestion().setLinkId("link0").addAnswer().setValue(new Coding().setSystem("urn:system").setCode("code1"));
result = myVal.validateWithResult(qa);
ourLog.info(result.getMessages().toString());
assertThat(result.getMessages().toString(), containsString("myLocationString=QuestionnaireAnswers.group(0).question(0).answer(0)"));
assertThat(result.getMessages().toString(), containsString("myLocationString=//QuestionnaireAnswers/group[0]/question[0]/answer[0]"));
assertThat(result.getMessages().toString(),
containsString("myMessage=Question with linkId[link0] has answer with system[urn:system] and code[code1] but this is not a valid answer for ValueSet[http://somevalueset/ValueSet/123]"));
result.toOperationOutcome();
}
@Test
public void testInvalidReference() {
// A questionnaire reference with no resource type prefix should yield exactly
// one validation message reporting the invalid reference.
QuestionnaireAnswers qa = new QuestionnaireAnswers();
qa.getQuestionnaire().setReference("someReference"); // not relative
ValidationResult result = myVal.validateWithResult(qa);
assertEquals(result.getMessages().toString(), 1, result.getMessages().size());
assertThat(result.getMessages().toString(), containsString("Invalid reference 'someReference"));
}
@Test
public void testUnknownValueSet() {
String questionnaireRef = "http://example.com/Questionnaire/q1";
@ -181,4 +196,39 @@ public class QuestionnaireAnswersValidatorIntegrationTest {
assertThat(result.getMessages().toString(), containsString("myMessage=Reference could not be found: http://some"));
}
/**
 * Sample provided by Eric van der Zwan
 */
@SuppressWarnings("unchecked")
@Test
public void testSampleQuestionnaire() {
// Stub the resource loader so referenced Questionnaire/ValueSet resources are
// read from classpath fixtures under /nice/ instead of a live server.
when(myResourceLoaderMock.load(Mockito.any(Class.class), Mockito.any(IdType.class))).thenAnswer(new Answer<IBaseResource>() {
@Override
public IBaseResource answer(InvocationOnMock theInvocation) throws Throwable {
IdType id = (IdType) theInvocation.getArguments()[1];
String name = "/nice/" + id.getIdPart() + ".xml";
InputStream in = getClass().getResourceAsStream(name);
if (in == null) {
throw new IllegalArgumentException(name);
}
// NOTE(review): reader/stream are never closed — acceptable for a test
// fixture but worth wrapping in try/finally; confirm target Java version.
InputStreamReader reader = new InputStreamReader(in);
String body = IOUtils.toString(reader);
if (Questionnaire.class.equals(theInvocation.getArguments()[0])) {
return ourCtx.newXmlParser().parseResource(Questionnaire.class, body);
} else if (ValueSet.class.equals(theInvocation.getArguments()[0])) {
return ourCtx.newXmlParser().parseResource(ValueSet.class, body);
} else {
throw new IllegalArgumentException(id.getValue());
}
}
});
// The sample answers file contains an integer answer to a string question;
// expect the corresponding type-mismatch message.
QuestionnaireAnswers qa = ourCtx.newXmlParser().parseResource(QuestionnaireAnswers.class, new InputStreamReader(getClass().getResourceAsStream("/nice/answer-1-admission.xml")));
ValidationResult result = myVal.validateWithResult(qa);
ourLog.info(result.getMessages().toString());
assertThat(result.getMessages().toString(), containsString("Answer to question with linkId[partialBSN] found of type [IntegerType] but this is invalid for question of type [string]"));
}
}

View File

@ -1,7 +1,8 @@
package ca.uhn.fhir.validation;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import java.io.IOException;
import java.util.ArrayList;
@ -9,13 +10,15 @@ import java.util.List;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.Coding;
import org.hl7.fhir.instance.model.DataElement;
import org.hl7.fhir.instance.model.Questionnaire;
import org.hl7.fhir.instance.model.Questionnaire.AnswerFormat;
import org.hl7.fhir.instance.model.Questionnaire.GroupComponent;
import org.hl7.fhir.instance.model.QuestionnaireAnswers;
import org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionnaireAnswersStatus;
import org.hl7.fhir.instance.model.Reference;
import org.hl7.fhir.instance.model.StringType;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.Questionnaire.AnswerFormat;
import org.hl7.fhir.instance.model.QuestionnaireAnswers.QuestionnaireAnswersStatus;
import org.hl7.fhir.instance.utils.WorkerContext;
import org.hl7.fhir.instance.validation.QuestionnaireAnswersValidator;
import org.hl7.fhir.instance.validation.ValidationMessage;
@ -23,6 +26,7 @@ import org.junit.Before;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.DataFormatException;
public class QuestionnaireAnswersValidatorTest {
private static final FhirContext ourCtx = FhirContext.forDstu2Hl7Org();
@ -55,6 +59,62 @@ public class QuestionnaireAnswersValidatorTest {
assertThat(errors.toString(), containsString("Answer to question with linkId[link0] found of type [StringType] but this is invalid for question of type [boolean]"));
}
@Test
public void testExtensionDereference() throws Exception {
// Questionnaire whose questions carry the "questionnaire-deReference" extension
// pointing at DataElement resources; the validator should produce no messages
// (the dereference path currently returns early without further validation).
Questionnaire q = ourCtx.newJsonParser().parseResource(Questionnaire.class, IOUtils.toString(getClass().getResourceAsStream("/dereference-q.json")));
QuestionnaireAnswers qa = ourCtx.newXmlParser().parseResource(QuestionnaireAnswers.class, IOUtils.toString(getClass().getResourceAsStream("/dereference-qa.xml")));
DataElement de = ourCtx.newJsonParser().parseResource(DataElement.class, IOUtils.toString(getClass().getResourceAsStream("/dereference-de.json")));
myWorkerCtx.getQuestionnaires().put(qa.getQuestionnaire().getReference(), q);
myWorkerCtx.getDataElements().put("DataElement/4771", de);
List<ValidationMessage> errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
ourLog.info(errors.toString());
// assertEquals(message, expected, actual): expected count (0) must come before
// the actual size, matching the other assertions in this class.
assertEquals(errors.toString(), 0, errors.size());
}
@Test
public void testGroupWithNoLinkIdInQuestionnaireAnswers() {
// Questionnaire with a single group that has no linkId; the answers group also
// has no linkId, so matching must succeed by the null/blank linkId path and the
// type mismatch (string answer to a boolean question) must still be reported.
Questionnaire q = new Questionnaire();
GroupComponent qGroup = q.getGroup().addGroup();
qGroup.addQuestion().setLinkId("link0").setRequired(true).setType(AnswerFormat.BOOLEAN);
QuestionnaireAnswers qa = new QuestionnaireAnswers();
qa.getQuestionnaire().setReference("http://example.com/Questionnaire/q1");
org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent qaGroup = qa.getGroup().addGroup();
qaGroup.addQuestion().setLinkId("link0").addAnswer().setValue(new StringType("FOO"));
myWorkerCtx.getQuestionnaires().put(qa.getQuestionnaire().getReference(), q);
List<ValidationMessage> errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("Answer to question with linkId[link0] found of type [StringType] but this is invalid for question of type [boolean]"));
}
@Test
public void testMultipleGroupsWithNoLinkIdInQuestionnaireAnswers() {
// Two sibling questionnaire groups both without a linkId are ambiguous: the
// validator cannot tell which group an answer belongs to and must report the
// duplicate (blank) linkId as a business-rule violation.
Questionnaire q = new Questionnaire();
GroupComponent qGroup = q.getGroup().addGroup();
qGroup.addQuestion().setLinkId("link0").setRequired(true).setType(AnswerFormat.BOOLEAN);
GroupComponent qGroup2 = q.getGroup().addGroup();
qGroup2.addQuestion().setLinkId("link1").setRequired(true).setType(AnswerFormat.BOOLEAN);
QuestionnaireAnswers qa = new QuestionnaireAnswers();
qa.getQuestionnaire().setReference("http://example.com/Questionnaire/q1");
org.hl7.fhir.instance.model.QuestionnaireAnswers.GroupComponent qaGroup = qa.getGroup().addGroup();
qaGroup.addQuestion().setLinkId("link0").addAnswer().setValue(new StringType("FOO"));
myWorkerCtx.getQuestionnaires().put(qa.getQuestionnaire().getReference(), q);
List<ValidationMessage> errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("Questionnaire in invalid, unable to validate QuestionnaireAnswers: Multiple groups found at this position with linkId[]"));
}
@Test
public void testCodedAnswer() {
String questionnaireRef = "http://example.com/Questionnaire/q1";
@ -65,6 +125,7 @@ public class QuestionnaireAnswersValidatorTest {
ValueSet options = new ValueSet();
options.getCodeSystem().setSystem("urn:system").addConcept().setCode("code0");
options.getCompose().addInclude().setSystem("urn:system2").addConcept().setCode("code2");
myWorkerCtx.getValueSets().put("http://somevalueset", options);
QuestionnaireAnswers qa;
@ -78,7 +139,14 @@ public class QuestionnaireAnswersValidatorTest {
errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
assertEquals(errors.toString(), 0, errors.size());
qa = new QuestionnaireAnswers();
qa.getQuestionnaire().setReference(questionnaireRef);
qa.getGroup().addQuestion().setLinkId("link0").addAnswer().setValue(new Coding().setSystem("urn:system2").setCode("code2"));
errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
assertEquals(errors.toString(), 0, errors.size());
// Bad code
qa = new QuestionnaireAnswers();
@ -87,8 +155,19 @@ public class QuestionnaireAnswersValidatorTest {
errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("location=QuestionnaireAnswers.group(0).question(0).answer(0)"));
assertThat(errors.toString(), containsString("location=//QuestionnaireAnswers/group[0]/question[0]/answer[0]"));
assertThat(errors.toString(), containsString("message=Question with linkId[link0] has answer with system[urn:system] and code[code1] but this is not a valid answer for ValueSet[http://somevalueset]"));
qa = new QuestionnaireAnswers();
qa.getQuestionnaire().setReference(questionnaireRef);
qa.getGroup().addQuestion().setLinkId("link0").addAnswer().setValue(new Coding().setSystem("urn:system2").setCode("code3"));
errors = new ArrayList<ValidationMessage>();
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("location=//QuestionnaireAnswers/group[0]/question[0]/answer[0]"));
assertThat(errors.toString(), containsString("message=Question with linkId[link0] has answer with system[urn:system2] and code[code3] but this is not a valid answer for ValueSet[http://somevalueset]"));
}
@ -126,7 +205,7 @@ public class QuestionnaireAnswersValidatorTest {
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("location=QuestionnaireAnswers.group(0).question"));
assertThat(errors.toString(), containsString("location=//QuestionnaireAnswers/group[0]/question[0]"));
assertThat(errors.toString(), containsString("message=Found answer with linkId[link1] but this ID is not allowed at this position"));
}
@ -144,7 +223,7 @@ public class QuestionnaireAnswersValidatorTest {
myVal.validate(errors, qa);
ourLog.info(errors.toString());
assertThat(errors.toString(), containsString("location=QuestionnaireAnswers.group(0).group(0)"));
assertThat(errors.toString(), containsString("location=//QuestionnaireAnswers/group[0]/group[0]"));
assertThat(errors.toString(), containsString("Group with linkId[link1] found at this position, but this group does not exist at this position in Questionnaire"));
}

View File

@ -0,0 +1,30 @@
{
"resourceType":"DataElement",
"id":"4770",
"meta":{
"versionId":"1",
"lastUpdated":"2015-07-09T03:28:33.831-04:00"
},
"text":{
"status":"generated",
"div":"<div xmlns=\"http://www.w3.org/1999/xhtml\"><!-- Snipped for brevity --></div>"
},
"identifier":{
"value":"Age Question"
},
"version":"1.0",
"name":"QuestionSample",
"status":"active",
"publisher":"EDIFECS",
"element":[
{
"path":"DataElement.question",
"label":"What is your age?",
"type":[
{
"code":"positiveInt"
}
]
}
]
}

View File

@ -0,0 +1,63 @@
{
"resourceType":"Questionnaire",
"id":"4772",
"meta":{
"versionId":"1",
"lastUpdated":"2015-07-09T03:30:29.589-04:00"
},
"identifier":[
{
"value":"My First Questionnaire"
}
],
"date":"2015-07-09T11:32:03+04:00",
"publisher":"Edifecs",
"group":{
"title":"Main Group",
"group":[
{
"text":"Common",
"question":[
{
"linkId":"Link0",
"text":"What is your name?",
"type":"string"
},
{
"linkId":"Link1",
"text":"What is your age?",
"type":"integer"
},
{
"linkId":"Link2",
"text":"Do you smoke?",
"type":"boolean"
},
{
"extension":[
{
"url":"http://hl7.org/fhir/StructureDefinition/questionnaire-deReference",
"valueReference":{
"reference":"DataElement/4770"
}
}
],
"linkId":"Link3"
},
{
"extension":[
{
"url":"http://hl7.org/fhir/StructureDefinition/questionnaire-deReference",
"valueReference":{
"reference":"DataElement/4771"
}
}
],
"linkId":"Link4"
}
]
}
]
}
}

View File

@ -0,0 +1,53 @@
<QuestionnaireAnswers xmlns="http://hl7.org/fhir">
<identifier>
<value value="4772"/>
</identifier>
<questionnaire>
<reference value="Questionnaire/4772"/>
</questionnaire>
<subject>
<reference value="Organization/4769"/>
</subject>
<authored value="2015-07-29T20:39:17+04:00"/>
<group>
<title value="Main Group"/>
<group>
<text value="Common"/>
<question>
<linkId value="Link0"/>
<text value="What is your name?"/>
<answer>
<valueString value="swewe"/>
</answer>
</question>
<question>
<linkId value="Link1"/>
<text value="What is your age?"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="Link2"/>
<text value="Do you smoke?"/>
<answer>
<valueBoolean value="false"/>
</answer>
</question>
<question>
<linkId value="Link3"/>
<text value="What is your age?"/>
<answer>
<valueInteger value="23"/>
</answer>
</question>
<question>
<linkId value="Link4"/>
<text value="Do you smoke?"/>
<answer>
<valueBoolean value="false"/>
</answer>
</question>
</group>
</group>
</QuestionnaireAnswers>

View File

@ -0,0 +1 @@
<ValueSet xmlns="http://hl7.org/fhir"><id value="104"/><define><concept><code value="1"/><definition value="Geen reactie"/></concept><concept><code value="2"/><definition value="Reactie op pijnprikkel"/></concept><concept><code value="3"/><definition value="Reactie op verbale prikkel"/></concept><concept><code value="4"/><definition value="Spontane reactie"/></concept></define></ValueSet>

View File

@ -0,0 +1 @@
<ValueSet xmlns="http://hl7.org/fhir"><id value="198"/><define><concept><code value="1"/><definition value="Geen reactie"/></concept><concept><code value="2"/><definition value="Strekken"/></concept><concept><code value="3"/><definition value="Decorticatie reflex (abnormaal buigen)"/></concept><concept><code value="4"/><definition value="Spastische reactie (terugtrekken)"/></concept><concept><code value="5"/><definition value="Lokaliseert pijn"/></concept><concept><code value="6"/><definition value="Volgt verbale commando's op"/></concept></define></ValueSet>

View File

@ -0,0 +1 @@
<ValueSet xmlns="http://hl7.org/fhir"><id value="435"/><define><concept><code value="1"/><definition value="Geen reactie"/></concept><concept><code value="2"/><definition value="Onbegrijpelijke geluiden"/></concept><concept><code value="3"/><definition value="Onduidelijke woorden"/></concept><concept><code value="4"/><definition value="Verwarde conversatie"/></concept><concept><code value="6"/><definition value="Helder en adequaat"/></concept></define></ValueSet>

View File

@ -0,0 +1 @@
<ValueSet xmlns="http://hl7.org/fhir"><id value="AdmissionSource"/><extensible value="false"/><define><concept><code value="1"/><definition value="Operatiekamer vanaf verpleegafdeling zelfde ziekenhuis"/></concept><concept><code value="2"/><definition value="Operatiekamer vanaf Eerste Hulp afdeling zelfde ziekenhuis"/></concept><concept><code value="3"/><definition value="Eerste Hulp afdeling zelfde ziekenhuis"/></concept><concept><code value="4"/><definition value="Verpleegafdeling zelfde ziekenhuis"/></concept><concept><code value="5"/><definition value="CCU/IC zelfde ziekenhuis"/></concept><concept><code value="6"/><definition value="Recovery zelfde ziekenhuis (alleen bij niet geplande IC-opname)"/></concept><concept><code value="7"/><definition value="Special/Medium care zelfde ziekenhuis"/></concept><concept><code value="8"/><definition value="Operatiekamer vanaf verpleegafdeling ander ziekenhuis"/></concept><concept><code value="9"/><definition value="Operatiekamer vanaf Eerste Hulp afdeling ander ziekenhuis"/></concept><concept><code value="10"/><definition value="Eerste Hulp afdeling ander ziekenhuis"/></concept><concept><code value="11"/><definition value="Verpleegafdeling ander ziekenhuis"/></concept><concept><code value="12"/><definition value="CCU/IC ander ziekenhuis"/></concept><concept><code value="13"/><definition value="Recovery ander ziekenhuis"/></concept><concept><code value="14"/><definition value="Special/Medium care ander ziekenhuis"/></concept><concept><code value="15"/><definition value="Huis"/></concept><concept><code value="16"/><definition value="Anders"/></concept><concept><code value="17"/><definition value="Andere locatie zelfde ziekenhuis, transport per ambulance"/></concept></define></ValueSet>

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
<ValueSet xmlns="http://hl7.org/fhir"><id value="Discharged_to"/><extensible value="false"/><define><concept><code value="1"/><definition value="verpleegafdeling zelfde ziekenhuis (inclusief andere lokatie)"/></concept><concept><code value="2"/><definition value="CCU/ICU zelfde ziekenhuis"/></concept><concept><code value="3"/><definition value="Recovery/Medium Care zelfde ziekenhuis"/></concept><concept><code value="4"/><definition value="Verpleegafdeling ander ziekenhuis"/></concept><concept><code value="5"/><definition value="CCU/ICU ander ziekenhuis"/></concept><concept><code value="6"/><definition value="Recovery/Medium Care ander ziekenhuis"/></concept><concept><code value="7"/><definition value="Mortuarium"/></concept><concept><code value="8"/><definition value="Huis"/></concept><concept><code value="9"/><definition value="Anders"/></concept></define></ValueSet>

View File

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<QuestionnaireAnswers xmlns="http://hl7.org/fhir">
<text>
First message at the moment of admission.
</text>
<questionnaire>
<reference value="Questionnaire/nice-pilot-questionnaire" /> <!-- Should reference the nice-pilot-questionnaire.xml -->
</questionnaire>
<status value="in-progress"/>
<group>
<linkId value="NICE-FHIR-Pilot"/>
<!-- 1 -->
<group>
<linkId value="opname identificatie"/>
<question>
<linkId value="hospno"/>
<answer>
<valueInteger value="88"/>
</answer>
</question>
<question>
<linkId value="icno"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="admno"/>
<answer>
<valueInteger value="1239827"/>
</answer>
</question>
</group>
<!-- 2 -->
<group>
<linkId value="patientinformation" />
<question>
<linkId value="patno"/>
<answer>
<valueString value="KSJDHFSDFMNCVXJK"/>
</answer>
</question>
<question>
<linkId value="partialBSN"/>
<answer>
<valueInteger value="1232456"/>
</answer>
</question>
<question>
<linkId value="date_of_birth"/>
<answer>
<valueDate value="1964-03-09"/>
</answer>
</question>
</group>
<!-- 3 -->
<group>
<linkId value="opnamegegevens"/>
<question>
<linkId value="adm_hosp"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_icu"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_source"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="cardio_vas_insuf"/>
<answer>
<valueBoolean value="true"/>
</answer>
</question>
</group>
</group>
</QuestionnaireAnswers>

View File

@ -0,0 +1,181 @@
<?xml version="1.0" encoding="UTF-8"?>
<QuestionnaireAnswers xmlns="http://hl7.org/fhir">
<text>
Most of the items requested by NICE is collected within the first 24hours of ICU admission.
Therefore we would like an update of the message with these values.
</text>
<questionnaire>
<reference value="Questionnaire/nice-pilot-questionnaire" /> <!-- Should reference the nice-pilot-questionnaire.xml -->
</questionnaire>
<status value="in-progress"/>
<group>
<linkId value="NICE-FHIR-Pilot"/>
<!-- 1 -->
<group>
<linkId value="opname identificatie"/>
<question>
<linkId value="hospno"/>
<answer>
<valueInteger value="88"/>
</answer>
</question>
<question>
<linkId value="icno"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="admno"/>
<answer>
<valueInteger value="1239827"/>
</answer>
</question>
</group>
<!-- 2 -->
<group>
<linkId value="patientinformation" />
<question>
<linkId value="patno"/>
<answer>
<valueString value="KSJDHFSDFMNCVXJK"/>
</answer>
</question>
<question>
<linkId value="partialBSN"/>
<answer>
<valueInteger value="1232456"/>
</answer>
</question>
<question>
<linkId value="date_of_birth"/>
<answer>
<valueDate value="1964-03-09"/>
</answer>
</question>
</group>
<!-- 3 -->
<group>
<linkId value="opnamegegevens"/>
<question>
<linkId value="adm_hosp"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_icu"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_source"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="cardio_vas_insuf"/>
<answer>
<valueBoolean value="true"/>
</answer>
</question>
</group>
<!-- 4 -->
<group>
<linkId value="1ste 24uur na opname"/>
<question>
<linkId value="ap4diag1"/>
<answer>
<valueInteger value="129"/> <!-- Guillian-Barre syndrome -->
</answer>
</question>
<question>
<linkId value="ap4diag2"/>
<answer>
<valueInteger value="246"/> <!-- CABG with other operation -->
</answer>
</question>
<question>
<linkId value="meanbl_max"/>
<answer>
<valueQuantity>
<value value="240" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="meanbl_min"/>
<answer>
<valueQuantity>
<value value="11" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="creat_max"/>
<answer>
<valueDecimal value="287"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="creat_min"/>
<answer>
<valueDecimal value="99"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="eye_24"/>
<type value="choice"/>
<answer>
<valueInteger value="2" />
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9267-6"/>
<display value="Glasgow coma score eye opening"/>
</coding>
<text value="Eyes"/>
</code>
-->
</question>
<question>
<linkId value="motor_24"/>
<answer>
<valueInteger value="3"/>
</answer>
</question>
<question>
<linkId value="verbal_24"/>
<answer>
<valueInteger value="1"/>
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9270-0"/>
<display value="Glasgow coma score verbal"/>
</coding>
<text value="Verbal"/>
</code> -->
</question>
</group>
</group>
</QuestionnaireAnswers>

View File

@ -0,0 +1,198 @@
<?xml version="1.0" encoding="UTF-8"?>
<QuestionnaireAnswers xmlns="http://hl7.org/fhir">
<text>
At the end of an ICU admission, we want the discharge information.
</text>
<questionnaire>
<reference value="Questionnaire/nice-pilot-questionnaire" /> <!-- Should reference the nice-pilot-questionnaire.xml -->
</questionnaire>
<status value="completed"/>
<group>
<linkId value="NICE-FHIR-Pilot"/>
<!-- 1 -->
<group>
<linkId value="opname identificatie"/>
<question>
<linkId value="hospno"/>
<answer>
<valueInteger value="88"/>
</answer>
</question>
<question>
<linkId value="icno"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="admno"/>
<answer>
<valueInteger value="1239827"/>
</answer>
</question>
</group>
<!-- 2 -->
<group>
<linkId value="patientinformation" />
<question>
<linkId value="patno"/>
<answer>
<valueString value="KSJDHFSDFMNCVXJK"/>
</answer>
</question>
<question>
<linkId value="partialBSN"/>
<answer>
<valueInteger value="1232456"/>
</answer>
</question>
<question>
<linkId value="date_of_birth"/>
<answer>
<valueDate value="1964-03-09"/>
</answer>
</question>
</group>
<!-- 3 -->
<group>
<linkId value="opnamegegevens"/>
<question>
<linkId value="adm_hosp"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_icu"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_source"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="cardio_vas_insuf"/>
<answer>
<valueBoolean value="true"/>
</answer>
</question>
</group>
<!-- 4 -->
<group>
<linkId value="1ste 24uur na opname"/>
<question>
<linkId value="ap4diag1"/>
<answer>
<valueInteger value="129"/> <!-- Guillian-Barre syndrome -->
</answer>
</question>
<question>
<linkId value="ap4diag2"/>
<answer>
<valueInteger value="246"/> <!-- CABG with other operation -->
</answer>
</question>
<question>
<linkId value="meanbl_max"/>
<answer>
<valueQuantity>
<value value="240" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="meanbl_min"/>
<answer>
<valueQuantity>
<value value="11" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="creat_max"/>
<answer>
<valueDecimal value="287"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="creat_min"/>
<answer>
<valueDecimal value="99"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="eye_24"/>
<type value="choice"/>
<answer>
<valueInteger value="2" />
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9267-6"/>
<display value="Glasgow coma score eye opening"/>
</coding>
<text value="Eyes"/>
</code>
-->
</question>
<question>
<linkId value="motor_24"/>
<answer>
<valueInteger value="3"/>
</answer>
</question>
<question>
<linkId value="verbal_24"/>
<answer>
<valueInteger value="1"/>
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9270-0"/>
<display value="Glasgow coma score verbal"/>
</coding>
<text value="Verbal"/>
</code> -->
</question>
</group>
<!-- 5 -->
<group>
<linkId value="IC ontslaggegevens"/>
<question>
<linkId value="dis_icu"/>
<answer>
<valueDateTime value="2015-07-23T08:44:00+01:00"/>
</answer>
</question>
<question>
<linkId value="discharged_to"/>
<answer>
<valueInteger value="7"/> <!-- mortuary -->
</answer>
</question>
</group>
</group>
</QuestionnaireAnswers>

View File

@ -0,0 +1,251 @@
<?xml version="1.0" encoding="UTF-8"?>
<Questionnaire xmlns="http://hl7.org/fhir">
<text>
The Dutch National Intensive Care Evaluation (NICE) foundation
facilitates a registry to enable participating intensive care units to
quantify and improve the quality of care they offer. The NICE
foundation offers intensive care units feedback and benchmarking on
patient outcomes, including mortality and allows them to compare their
outcomes with those achieved nationally and in groups of similar
hospitals. The foundation provides each participating intensive care
unit with biannual quality reports and access to an online tool
enabling each intensive care unit to perform additional analyses on
their data at any time. It also publishes two magazines each year with
features based on analyses of registry data and organizes a national
conference to enable medical and nursing specialists to meet and
discuss their own and national results. In addition, the foundation
collects data to enable quality indicators developed by the
Netherlands Society of Intensive Care to be calculated.
The data has been collected from 1996 until today using MS Access files which
were sent monthly. Now a pilot has been set up to collect this data "realtime"
with FHIR.
In this questionnaire is a representative selection of items from the NICE definitions.
The language used in the texts, is dutch (sorry).
</text>
<identifier>
<use value="official"/>
<system value="http://stichting-nice.nl/pilot/fhir"/>
</identifier>
<status value="draft"/>
<group>
<linkId value="NICE-FHIR-Pilot"/>
<title value="NICE FHIR Pilot questionnaire"/>
<required value="true"/>
<repeats value="false"/>
<!-- 1 -->
<group>
<linkId value="opname identificatie"/>
<required value="true"/>
<repeats value="false"/>
<title value="Wat is de identificatie van de opname volgens de regels van NICE?"/>
<text value="Vrijwel alle gegevens die de NICE wil ontvangen, is gerelateerd aan een IC-opname. Deze wordt door de NICE
geidentificeerd met behulp van de combinatie ziekenhuisnummer, ic-nummer en het opnamenummer.
Dit onderdeel is dan ook verplicht voor elke opname gerelateerde questionnaire. De periodieke gegevens als
de jaargegevens, kwartaalgegevens en de dagelijkse fte’s en bedbezetting hebben een aangepaste sleutel. "/>
<required value="true"/>
<repeats value="false"/>
<question>
<linkId value="hospno"/>
<text value="Wat is het door NICE toegekende ziekenhuisnummer?"/>
<type value="integer"/>
<required value="true"/>
<repeats value="false"/>
</question>
<question>
<linkId value="icno"/>
<text value="Wat is het door NICE toegekende IC nummer voor de afdeling?"/>
<type value="integer"/>
<required value="true"/>
<repeats value="false"/>
</question>
<question>
<linkId value="admno"/>
<text value="Wat is het unieke nummer dat door de instelling is toegewezen aan deze opname?"/>
<type value="integer"/>
<required value="true"/>
<repeats value="false"/>
</question>
</group>
<!-- 2 -->
<group>
<!-- For now as questions, might want to use a reference to a patient resource -->
<linkId value="patientinformation" />
<text value="De gegevens die bekend zijn bij IC opname"/>
<required value="false"/>
<repeats value="false"/>
<question>
<linkId value="patno"/>
<text value="Wat is het NICE geencrypteerde patientnummer?"/>
<type value="string"/>
<required value="true"/>
</question>
<question>
<linkId value="partialBSN"/>
<text value="Wat zijn de eerste 6 cijfers van het BSN?"/>
<type value="string"/>
</question>
<question>
<linkId value="date_of_birth"/>
<text value="Wat is de geboortedatum van de patient?"/>
<type value="date"/>
<required value="true"/>
</question>
</group>
<!-- 3 -->
<group>
<linkId value="opnamegegevens"/>
<required value="false"/>
<repeats value="false"/>
<question>
<linkId value="adm_hosp"/>
<text value="Wat is de ziekenhuisopnamedatum (optioneel: en –tijd)?"/>
<type value="dateTime"/>
</question>
<question>
<linkId value="adm_icu"/>
<text value="Wat is de IC opnamedatum en –tijd?"/>
<type value="dateTime"/>
<required value="true"/>
</question>
<question>
<linkId value="adm_source"/>
<text value="Wat is de herkomst van de patient direct voor IC-opname"/>
<type value="choice"/>
<options>
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/AdmissionSource"/>
</options>
</question>
<question>
<linkId value="cardio_vas_insuf"/>
<text value="Angina of symptomen in rust of bij minimale inspanning (aankleden en verzorging)? (New York Heart Association klasse IV)."/>
<type value="boolean"/>
</question>
</group>
<!-- 4 -->
<group>
<linkId value="1ste 24uur na opname"/>
<required value="false"/>
<repeats value="false"/>
<text value="De gegevens over de 1ste 24uur van de IC opname"/>
<question>
<linkId value="ap4diag1"/>
<text value="Welke APACHE IV diagnose hoort bij deze IC opname?"/>
<type value="choice"/>
<options>
<!-- reference exists -->
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/ApacheIVDiagnose"/>
</options>
</question>
<question>
<linkId value="ap4diag2"/>
<text value="Welke optionele 2de APACHE IV diagnose hoort bij deze IC opname?"/>
<type value="choice"/>
<options>
<!-- reference exists -->
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/ApacheIVDiagnose"/>
</options>
</question>
<question>
<linkId value="meanbl_max"/>
<text value="Wat was de hoogste gemiddelde bloeddruk in de 1ste 24uur van IC opname?"/>
<type value="quantity"/>
<!-- should be in mmHg -->
</question>
<question>
<linkId value="meanbl_min"/>
<text value="Wat was de laagste gemiddelde bloeddruk in de 1ste 24uur van IC opname?"/>
<type value="quantity"/>
<!-- should be in mmHg -->
</question>
<question>
<linkId value="creat_max"/>
<text value="Wat was de hoogste waarde serum creatinine in de 1ste 24uur van IC opname?"/>
<type value="quantity"/>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="creat_min"/>
<text value="Wat was de laagste waarde serum creatinine in de 1ste 24uur van IC opname?"/>
<type value="quantity"/>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="eye_24"/>
<text value="Wat is de oogreactie 24uur na IC opname?"/>
<type value="choice"/>
<options>
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/104"/>
</options>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9267-6"/>
<display value="Glasgow coma score eye opening"/>
</coding>
<text value="Eyes"/>
</code>
-->
</question>
<question>
<linkId value="motor_24"/>
<concept>
<coding>
<system value="http://loinc.org"/>
<code value="9268-4"/>
<display value="Glasgow coma score motor"/>
</coding>
</concept>
<text value="Wat is de motorische reactie 24uur na IC opname?"/>
<type value="choice"/>
<options>
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/198"/>
</options>
</question>
<question>
<linkId value="verbal_24"/>
<text value="Wat is de verbale reactie 24uur na IC opname?"/>
<type value="choice"/>
<options>
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/435"/>
</options>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9270-0"/>
<display value="Glasgow coma score verbal"/>
</coding>
<text value="Verbal"/>
</code> -->
</question>
</group>
<!-- 5 -->
<group>
<linkId value="IC ontslaggegevens"/>
<required value="false"/>
<repeats value="false"/>
<text value="De gegevens die bekend zijn na IC ontslag"/>
<question>
<linkId value="dis_icu"/>
<text value="Wat is de IC ontslagdatum en –tijd?"/>
<type value="dateTime"/>
</question>
<question>
<linkId value="discharged_to"/>
<text value="Wat is de ontslagbestemming?"/>
<type value="choice"/>
<options>
<reference value="https://stichting-nice.nl/pilot/fhir/ValueSet/Discharged_to"/>
</options>
</question>
</group>
</group>
</Questionnaire>

View File

@ -0,0 +1,195 @@
<?xml version="1.0" encoding="UTF-8"?>
<QuestionnaireAnswers xmlns="http://hl7.org/fhir">
<questionnaire>
<reference value="Questionnaire/nice-pilot-questionnaire" /> <!-- Should reference the nice-pilot-questionnaire.xml -->
</questionnaire>
<status value="in-progress"/>
<group>
<linkId value="NICE-FHIR-Pilot"/>
<!-- 1 -->
<group>
<linkId value="opname identificatie"/>
<question>
<linkId value="hospno"/>
<answer>
<valueInteger value="88"/>
</answer>
</question>
<question>
<linkId value="icno"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="admno"/>
<answer>
<valueInteger value="1239827"/>
</answer>
</question>
</group>
<!-- 2 -->
<group>
<linkId value="patientinformation" />
<question>
<linkId value="patno"/>
<answer>
<valueString value="KSJDHFSDFMNCVXJK"/>
</answer>
</question>
<question>
<linkId value="partialBSN"/>
<answer>
<valueInteger value="1232456"/>
</answer>
</question>
<question>
<linkId value="date_of_birth"/>
<answer>
<valueDate value="1964-03-09"/>
</answer>
</question>
</group>
<!-- 3 -->
<group>
<linkId value="opnamegegevens"/>
<question>
<linkId value="adm_hosp"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_icu"/>
<answer>
<valueDateTime value="2015-07-21T14:04:00+01:00"/>
</answer>
</question>
<question>
<linkId value="adm_source"/>
<answer>
<valueInteger value="1"/>
</answer>
</question>
<question>
<linkId value="cardio_vas_insuf"/>
<answer>
<valueBoolean value="true"/>
</answer>
</question>
</group>
<!-- 4 -->
<group>
<linkId value="1ste 24uur na opname"/>
<question>
<linkId value="ap4diag1"/>
<answer>
<valueInteger value="129"/> <!-- Guillian-Barre syndrome -->
</answer>
</question>
<question>
<linkId value="ap4diag2"/>
<answer>
<valueInteger value="246"/> <!-- CABG with other operation -->
</answer>
</question>
<question>
<linkId value="meanbl_max"/>
<answer>
<valueQuantity>
<value value="240" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="meanbl_min"/>
<answer>
<valueQuantity>
<value value="11" />
<units value="mmHg" />
<system value="http://unitsofmeasure.org" />
<code value="mm[Hg]" />
</valueQuantity>
</answer>
</question>
<question>
<linkId value="creat_max"/>
<answer>
<valueDecimal value="287"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="creat_min"/>
<answer>
<valueDecimal value="99"/>
</answer>
<!-- should be in umol/l -->
</question>
<question>
<linkId value="eye_24"/>
<type value="choice"/>
<answer>
<valueInteger value="2" />
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9267-6"/>
<display value="Glasgow coma score eye opening"/>
</coding>
<text value="Eyes"/>
</code>
-->
</question>
<question>
<linkId value="motor_24"/>
<answer>
<valueInteger value="3"/>
</answer>
</question>
<question>
<linkId value="verbal_24"/>
<answer>
<valueInteger value="1"/>
</answer>
<!-- could or should i use loinc?
<code>
<coding>
<system value="http://loinc.org"/>
<code value="9270-0"/>
<display value="Glasgow coma score verbal"/>
</coding>
<text value="Verbal"/>
</code> -->
</question>
</group>
<!-- 5 -->
<group>
<linkId value="IC ontslaggegevens"/>
<question>
<linkId value="dis_icu"/>
<answer>
<valueDateTime value="2015-07-23T08:44:00+01:00"/>
</answer>
</question>
<question>
<linkId value="discharged_to"/>
<answer>
<valueInteger value="7"/>
</answer>
</question>
</group>
</group>
</QuestionnaireAnswers>

View File

@ -49,6 +49,11 @@ public class ${className}ResourceProvider extends
@Description(shortDefinition="The resource language")
@OptionalParam(name="_language")
StringParam theResourceLanguage,
@Description(shortDefinition="Search for resources which have the given tag")
@OptionalParam(name="_tag")
TokenAndListParam theSearchForTag,
#foreach ( $param in $searchParams ) #{if}(true) #{end}
@Description(shortDefinition="${param.description}")
@ -113,10 +118,11 @@ public class ${className}ResourceProvider extends
startRequest(theServletRequest);
try {
SearchParameterMap paramMap = new SearchParameterMap();
paramMap.add("_id", theId);
paramMap.add("_language", theResourceLanguage);
paramMap.add("_id", theId);
paramMap.add("_language", theResourceLanguage);
paramMap.add("_tag", theSearchForTag);
#foreach ( $param in $searchParams )
paramMap.add("${param.name}", the${param.nameCapitalized});
paramMap.add("${param.name}", the${param.nameCapitalized});
#end
#if ( $version != 'dstu' )
paramMap.setRevIncludes(theRevIncludes);

View File

@ -1,35 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>restful-server-example</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.common.project.facet.core.builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
</natures>
</projectDescription>

View File

@ -53,6 +53,20 @@
Resources and datatypes are now serializable. This is an
experimental feature which hasn't yet been extensively tested. Please test and give us your feedback!
</action>
<action type="add">
Switch REST server to using HttpServletRequest#getContextPath() to get
the servlet's context path. This means that the server should behave more
predictably, and should work in servlet 2.4 environments. Thanks to
Ken Zeisset for the suggestion!
</action>
<action type="add" issue="200">
Vagrant environment now has an apt recipe to ensure that
package lists are up to date. Thanks to GitHub user
Brian S. Corbin (@corbinbs) for the contribution!
</action>
<action type="add">
JPA server and generic client now both support the _tag search parameter
</action>
</release>
<release version="1.1" date="2015-07-13">
<action type="add">

View File

@ -178,7 +178,7 @@
<h4>Search - Other Query Options</h4>
<p>
The fluent search also has methods for sorting, limiting, specifying
JSON encoding, _include, _revinclude, _lastUpdated, etc.
JSON encoding, _include, _revinclude, _lastUpdated, _tag, etc.
</p>
<macro name="snippet">
<param name="id" value="searchAdv" />

View File

@ -184,7 +184,11 @@
this address into their URL bar, they will get nice formatted HTML back with a human readable version
of the content. This is helpful for testers.
</p>
<p>
To see an example of how this looks, see our demo server using the following example
query:
<a href="http://fhirtest.uhn.ca/baseDstu2/Patient/">http://fhirtest.uhn.ca/baseDstu2/Patient</a>
</p>
<p>
The following example shows how to register this interceptor within
a FHIR RESTful server.

1
vagrant/Vagrantfile vendored
View File

@ -66,6 +66,7 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
chef.roles_path = "./chef/roles"
chef.data_bags_path = "./chef/data_bags"
chef.add_recipe 'apt'
chef.add_recipe 'nmap'
chef.add_recipe 'mysql::server'
chef.add_recipe 'mysql::client'

15
vagrant/chef/cookbooks/apt/.gitignore vendored Normal file
View File

@ -0,0 +1,15 @@
.vagrant
Berksfile.lock
Gemfile.lock
*~
*#
.#*
\#*#
.*.sw[a-z]
*.un~
.bundle
.cache
.kitchen
bin
.kitchen.local.yml
.coverage

View File

@ -0,0 +1,47 @@
---
driver_config:
digitalocean_client_id: <%= ENV['DIGITAL_OCEAN_CLIENT_ID'] %>
digitalocean_api_key: <%= ENV['DIGITAL_OCEAN_API_KEY'] %>
provisioner:
name: chef_zero
require_chef_omnibus: latest
platforms:
- name: ubuntu-1004
driver_plugin: digitalocean
driver_config:
image_id: 5566812
flavor_id: 63
region_id: 4
ssh_key_ids: <%= ENV['DIGITAL_OCEAN_SSH_KEY_IDS'] %>
ssh_key: <%= ENV['DIGITAL_OCEAN_SSH_KEY_PATH'] %>
run_list:
- recipe[apt]
- name: ubuntu-1204
driver_plugin: digitalocean
driver_config:
image_id: 5588928
flavor_id: 63
region_id: 4
ssh_key_ids: <%= ENV['DIGITAL_OCEAN_SSH_KEY_IDS'] %>
ssh_key: <%= ENV['DIGITAL_OCEAN_SSH_KEY_PATH'] %>
run_list:
- recipe[apt]
- name: ubuntu-1404
driver_plugin: digitalocean
driver_config:
image_id: 5141286
flavor_id: 63
region_id: 4
ssh_key_ids: <%= ENV['DIGITAL_OCEAN_SSH_KEY_IDS'] %>
ssh_key: <%= ENV['DIGITAL_OCEAN_SSH_KEY_PATH'] %>
run_list:
- recipe[apt]
suites:
- name: default
run_list:
- recipe[apt]

View File

@ -0,0 +1,62 @@
driver:
name: vagrant
platforms:
- name: debian-7.2.0
run_list: apt::default
# - name: debian-8.0
# run_list: apt::default
- name: ubuntu-10.04
run_list: apt::default
- name: ubuntu-12.04
run_list: apt::default
- name: ubuntu-13.04
run_list: apt::default
- name: ubuntu-13.10
run_list: apt::default
- name: ubuntu-14.04
run_list: apt::default
# driver:
# box: chef/ubuntu-14.04
suites:
- name: default
run_list:
- recipe[minitest-handler]
- recipe[apt_test]
- name: cacher-client
run_list:
- recipe[minitest-handler]
- recipe[apt_test::cacher-client]
- name: cacher-ng
run_list:
- recipe[minitest-handler]
- recipe[apt_test::cacher-ng]
- name: cacher-ng-client
run_list:
- recipe[minitest-handler]
- recipe[apt_test::cacher-ng-client]
attributes:
apt:
cacher_dir: '/tmp/apt-cacher'
cacher_port: '9876'
cacher_interface: 'eth0'
compiletime: true
- name: lwrps
run_list:
- recipe[minitest-handler]
- recipe[apt_test::lwrps]
- name: unattended-upgrades
run_list:
- recipe[minitest-handler]
- recipe[apt_test::unattended-upgrades]
attributes:
apt:
unattended_upgrades:
enable: true

View File

@ -0,0 +1,37 @@
# RuboCop configuration for the apt cookbook.
# Most style cops are relaxed because Chef cookbook DSL code does not map
# cleanly onto general-purpose Ruby style rules.
AllCops:
  Exclude:
    # Third-party, generated, and Kitchen-managed files are not linted.
    - vendor/**
    - metadata.rb
    - Guardfile
    - test/cookbooks/apt_test/metadata.rb
    - .kitchen/**
# Disable ABCSize because it doesn't fit well with resources
AbcSize:
  Enabled: false
AssignmentInCondition:
  Enabled: false
ClassAndModuleChildren:
  Enabled: false
ClassLength:
  Enabled: false
CyclomaticComplexity:
  Max: 15
Documentation:
  Enabled: false
Encoding:
  Enabled: false
FileName:
  Enabled: false
HashSyntax:
  Enabled: false
LineLength:
  Enabled: false
MethodLength:
  Enabled: false
ParameterLists:
  Enabled: false
# StringLiterals:
#   EnforcedStyle: double_quotes
TrailingComma:
  Enabled: false

View File

@ -0,0 +1,44 @@
# Travis CI configuration: runs the rake `travis` task (lint/unit/kitchen),
# excluding the Vagrant gem group since CI uses cloud drivers instead.
language: ruby
bundler_args: --without kitchen_vagrant
rvm:
  - 2.1.0
before_install:
  # Reassemble the DigitalOcean SSH private key from chunked environment
  # variables (a single Travis secure var is too small to hold the key).
  - echo -n $DO_KEY_CHUNK_{0..30} >> ~/.ssh/id_do.base64
  - cat ~/.ssh/id_do.base64 | tr -d ' ' | base64 --decode > ~/.ssh/id_do.pem
script:
  - bundle exec rake travis --trace
after_script:
  # Always tear down cloud instances and dump Kitchen logs for debugging.
  - bundle exec kitchen destroy
  - cat .kitchen/logs/*
env:
global:
- secure: h2vmDV0AjfSWpUCerHIe9uAR9Od0QDoSRPidEiCjrhNCvaEIz+xFQl3M8eYmHBC31GABdEsiDLHQmj6bPtGOuvceKp37qc9V/h2/oPpPvW2HDjMT6mO6Qx1a5Pv4Xb0PhlUfseZCLDURi/0bM5czxGLH+oqluVEzgrM48m/YWaI=
- secure: fXvnhXK/ckP6EyFvGdYnh0YFwc1q+kF5HYUn3plOn7gytiERo+QwXqsnGtueiqrUzlCnqTWAj1T8wIuiPPmAUr3Ek/LUq1UwVcLYC9Wa2uGeTSooY6k1tzG1mtm+4njpIXxvOZ37NG2TwHLSG15iuJff6dtBE667/r88FjAGxgA=
- secure: NzFG53vCyET7REDbiDBA6AlKwgQtAUnb/2IyCyRwi/Svpf5UWdnqiHD9vepsaLQ+tnJPnCBelP5vM+H7Ln/uWLN39WPz4+36Dry6cWRgTIRG94jCKg3KQJvs6Z+V4bHwRdtvMO5HeAvJUCKRKsIW15odnnPPgPf3OrCHOfQK3Ko=
- secure: 3n0wmPKd+SBBP7prduer7jtIBLAvYP3T0/M+PupH6A8cKNL17kafQO9BwDSfcrRilX0UfOEND2Yq3Au6OfBjmKaFyScUdI5DOT+GWiBcYl9fbmtpz9KG6H8iWG8tIyJQ7vfV6pev8BGDQsmsIBu4SPYTUKUegtvkmmVoeV2je+4=
- secure: yrAlzIzT5sMpJ6VbjSjGLPlMH8LdPNBymaf/5FRxt1n5qLR2GQt9wxaCzGyjhgHo6iAdf3Dw9//YJ8vctnF61XqDOkpc6sW1d8IVZXHPOwb0mr94yQgYWXS/FLk7CFGuELYvKYMLIAXkq/QMOMyro2YLhTD25NblcxTpk5PuJXc=
- secure: 1FMkzei96uga+Fhqb3BK7dIiFj+ItiQhh1vHlotoLecDlDDT0o1YV8jBueLyZiVuFo/n4rKD8zMeCh3g5gfvWGQgZXbxEwMOLixtrW8bnOt/qAGs3qI6H57zXzWU2voUeyWa+ExorBMf1WL1RfIE6S/MlZNJW2OmWKceEaYrsLI=
- secure: ulI/7FdP1JVs61bi7CX3UwmG2v7SzHKfjf3P/wWvbCAO8Z/By/gnHmUn6I0EKsUEA4Gx3kXH8DmVtOZdcYLiNTnWGS38AxPnOfLYa0Lv/h7qqze4MFo5FliNB0iKaq0qn+L/eGYQSlr9e5Opf1Qdp2E47UUFd9VMaCSRKvIpqG4=
- secure: bcfXOTCxjA5Gv2BZWkjO8ukm5Z+t8AZglfvw7VRSqAe4BkyO62WmjONi2qYduB8nAX31IzKMEMOsA8zy9V96B+iAhdc3K5LTaa9VIocaNKoq0lgbLrqw3gW969p1vEbBzSiIn+3bDs7arX1LQ98e9UVt2hBQodSYicRzUuscbSw=
- secure: 2AeqJEMU9QYs40mfX6JppzAMsFJwrtFzYJVwgiN9EGCSqINzEIvnNB2z//nHsMlL6puP0VvapkCYHLDVHi2WHL0fSkkwyyGAfQVR01iM3CSjCl4j9h9Nv6bG78zNItQX64vg9FarTptqrZO/OnaT7dXUfGcAbr8cx9zJRv2fyMw=
- secure: Vx4VZUEF5ptw0lwHtLyMKcBRZwcpApsfAMgj/amqzAhFswjJoafHJ4Ryee+mrg46yXkaXed18xRu7pU/tXLGdp6vuvWFaC/1zCNfsdQBv+BAr46Ig4OB5mE/rwGOiNwbhSj9iHpKGPtUTwOHHqCqP/7ktR1TDIAmB5Esp0QBNFI=
- secure: 0ygYNLFO7ZBI7SH1PBt2ALmwtJfZ9ltCxOARP8ILkgCwLhczolcoT81/kfKigkP75dwYXU9LHiROm2GxFFH9reQdb5X2G3ik6/Xxn6KC1ewIuf8M7+qZx8p//ByazW7OZcvFrfGhsX/LJ5NfAC56Wii88oCUTYEGdM+MIPk8rzQ=
- secure: a0vsypNUkFkdnB2JiI/ZYd+hBfGC2pJt6JovUJr2sglZ0XvU7gyNT3iUmL3I22pM1gh+iAFPtS++OY0OSKRWnEbe7nMDY41soQW9UnfroexBVd+c1sYbJwbLJyTS2I3HxjIikWC2fGhySCX7ryghTQwJddGSh+q9eM0LgbvJ9Tc=
- secure: NUocMJTpGO7PWIMih4kjHGTRvb2vc6ep+fclviipkPzlQ5Ciabco1wW0HQJTX16JINgGVnzwFY16HFylyM74bcZoiSfGsN6E5GAmg5ZRxtpVs2wLHmsrMJxiT3VVMPHkBnZJXBNIcuMw6PAtiAcrOCyNY3Zuig1IuOERt12U2BY=
- secure: oLMuVgRvxDjYCb/hnA3YMJPDAAxyG2a7aUoGQHijSSBxL8VSW7BjfplUViCpWCsQADZgxLGHgfNUETAzHwheDm1TJT1KHVrYUXPDnLXgO89DvzrkLXlrr6JbaDMGUjG7fEEBNDnz5qycLiaoItX7x4/GPhSPOZ45q/64rW3Jvl4=
- secure: OnTKGDs568hSzE5sT9gQhY+nB1xHpnEMoT24UQybPn7Za79tJCkl3WlnqF2sd3+ImsT62xf6PNqRUue8TLVQLCVXCeStrIFPkdp0sps9xtFdNbi6Vb3yrq8QjU4RAQEz5+g8KcmycYMvF3M09lt7jAv8woebXkXdnzHz3IWhwTs=
- secure: 341IG8qb2JKqGDXGsx2a8xEVlkjILA6bSkWqZb9uhoEyW4je7PsqZdCfmKoAcau4I6+sBANu++qARJ58ZpWu+DJzuaKXkhSkdzo/MSykPK04I62v2qhRXUrhkpkXYAB4xK4wKFaCQWVHiCeV5jhEAayZxMB1gLwtxnZRRYXEhY0=
- secure: tvd71+q0xvglcUj6ugSw7oPlruTuNH/XGVh/k6B991zM27NQInmNeMWQa/WE7f8s2xSfG8f9cOtf11uDbPSHgyZj3T6CphfIl5sbT04zFO/1MfI5rbMRBXHvFpUWCb4gS+XUJ146DccuZInF9NI1e3aXNK6u3uPgxmn6DomVRlY=
- secure: BrMErE+J4TneCAGtDe5s8LQjhOJ6fTJSlA/dtmVx+LhzhyUA303wHCziPxrU2PJDL5fGe3r5zX83uoIXwKmU3kb2jRpy7SxF0kdsxqgdbzCnWINRDX5o0TH7AAViUA+nRccWF8wqNWsvkIhv6Pbr1u8B5xScPvBXhEuHJX2iVxo=
- secure: W3o/ae9BZDFNKe0UHGGDuYzriEvf/Eon+miqSwSZ/+rBuTeiX++3dyAMG/4XHeuNDgZ6H7qGtxlkqODK9AHZps5tFZ/zmVzXfzqRItIrGmGLKD7UvbIoS/C5fovhxIwMyWnlXdWeNf4o0QWJed6I188IlDumCxrmnWIWlueap6I=
- secure: rSCNg1LnxNjk/ux80iLQrcHqagWf80PBQf0kM9Wj5dD1nLWvbRMSSeXhiOdNY0ZD9RMROdjupsbFShdF788wAi7ITfhrMf09ys0D3/8ZDmCd51WAUvuutxMEz/TJKTWKItr2gbuRoXvv/hQ9DEWXyHx1A9DaDjwYGBH9bnYmgfs=
- secure: bHD0y307k3vUyA5cYdNc62Tq78r4HX8F4RG8bkgDAP0Z0u8SCfYunk89kw2NCF+qlo+ux84lhh2n/HKAwIdkupQSJaPGO4i241i8pUd1RA0T+CfjvdmMk6KjgbItauAhctgy61BTRJzoLAZQ75JurHLAjc5JNfSxsa1xQGsWIVQ=
- secure: A7NVQrmbAZhwIz+lkDcwX1zw+GJjLbwnW4/A0cCGcZObQxTqM7W6+B6UG9efm2vmxER9xtjstiH9wsLtJYerKxv05jwXoKlq/J+BVu2dTI9S6SqLas6Lo09XFfqtmYKgbV6R2CKDt8hT2a5A/Wp1hK4URjifu2gel/3MO6eeiJs=
- secure: BQCOwcb4u4spzd20vaUSkJycJ0oaojdyucmUV9pRYADH+jDEcCiL52L+bMxGZ+5vYPITG9wG2Kjv8VroyIuYfADMjZJjzMOMiwpjTWxoH7gA/12D8p7FcP9npllJgNg0TMvZUULVx2w2JQEGyq3Kfp2oKHfbgkBhtiSDH8mjSqc=
- secure: ODDYK3EogzOZ4rd/IW3HRAn+Ynpi1ob/lG7udBiiFhOZB8IWzZkNniRBZv60pOVq62YF0EidkNR4MK3Ln+wh3KLkqBWuR86ORgFmGazGxYlUbAfBfwt75FdK2+WAwyLGR3H7eqgTN+Y4U+GyPMUFfMBXbE73sX8Si2ldLy7n5ZE=
- secure: Mk6OHiJ5i4T+/3X5mLOhRuqif7M2cyTPbjNxNhW0oDQG4KB8M+18hDklwnQPpiXOL4LmuuSGDWgOZYnlZHFdLTzj5/nmbfh2qbr30Aqj8OgRnO/jjjU/BrcgBM2zrlH/TOKl5HqHp7bLesHkfTNzNy5IeIuRwZN/8qKNV1HZdtk=
- secure: GyPuciPuxMTNxr1igDPQAAvZdTE4bGIzVM4YpURvZngvhxQgWtvF09nV1FfNQAz643aq1bjbZ1ThfuOagWwTRUVqTgstxwCau/EGOAnoMXt1wDfvBuxpxLK2WDnO8PHYTDtpcnes5D6+45K5Z4bFAs0gIw/XoF0tZiCKVEo+OR0=
- secure: ix+m/F8qUKdjGpBLUW+okt00kmxFOAi7FKi0ndnjQPnHdygMec00tCxcvW4P16QsjpQq7w098Fsjc2V28hMo4RpH0JFPxnnfFttDZfk15UydrYD65EXhpyvh/xmQYd1cCK+YhymhPc0bOz0d7Ava7H7AGfBUkC0DzMdizpbB/pQ=
- secure: ZjxBwneeNa1whozgua2Jx3K9EA0EfaFCjsyB5SGmS8cALzLY4EJawH8iiSGapJrCxz58jK1z3ISdu9a7l5ne85fYI+WuHyTC7QVbW5OpRrOJMwTXf2/hRTVuavp9fA5W7B5nhoqgHMR56YXSaEO6juXiSztsYF7kJiGdCO0f6fQ=
- secure: zavu1UqfqRVh5hFaGdopn32B1ysW1sK769L+cSQnEQprDXB11uBcTJgBX104sw1zUnB0/QTfuZ3eKkhSpDpFg66I7IpqW/Aw7iWVa2EI/eGnQ5vOJwxWA/Bd08H5tpeXSCnjSOQp/Ac/0vhZy2DmhToKDPJakEtRP+/eaqbFNgc=
- secure: omEb6OGAUVSwHvFqUqqw3z16wDv0YrJzQZgHLZuKD8CvC3HvPDQaykqzvFtqrEWAUl5rZf1bSZ/jylximogKzx2+ENn5TjveJQTzQQwVw9FO/Jn8XVM0x7A3K86JpI0azG4LtFAaqpd4mWIAH5ZFeNYB2x6D2jrjXOajLoJ6zmM=

View File

@ -0,0 +1,8 @@
# Berksfile: resolves cookbook dependencies for testing.
# Release dependencies are read from metadata.rb via the Supermarket API.
source 'https://supermarket.chef.io'

metadata

# Extra cookbooks needed only by the Test Kitchen integration suites.
group :integration do
  cookbook 'minitest-handler'
  cookbook 'apt_test', :path => './test/cookbooks/apt_test'
end

View File

@ -0,0 +1,217 @@
apt Cookbook CHANGELOG
======================
v2.7.0 (2015-03-23)
-------------------
- Support Debian 8.0
- Filename verification for LWRPs
- Support SSL enabled apt repositories
v2.6.1 (2014-12-29)
-------------------
- Remove old preference files without .pref extension from previous versions
v2.6.0 (2014-09-09)
-------------------
- Always update on first run - check
- Adding ppa support for apt_repository
v2.5.3 (2014-08-14)
-------------------
- #87 - Improve default settings, account for non-linux platforms
v2.5.2 (2014-08-14)
-------------------
- Fully restore 2.3.10 behaviour
v2.5.1 (2014-08-14)
-------------------
- fix breakage introduced in apt 2.5.0
v2.5.0 (2014-08-12)
-------------------
- Add unattended-upgrades recipe
- Only update the cache for the created repository
- Added ChefSpec matchers and default_action for resources
- Avoid cloning resource attributes
- Minor documentation updates
v2.4.0 (2014-05-15)
-------------------
- [COOK-4534]: Add option to update apt cache at compile time
v2.3.10 (2014-04-23)
--------------------
- [COOK-4512] Bugfix: Use empty PATH if PATH is nil
v2.3.8 (2014-02-14)
-------------------
### Bug
- **[COOK-4287](https://tickets.chef.io/browse/COOK-4287)** - Cleanup the Kitchen
v2.3.6
------
* [COOK-4154] - Add chefspec matchers.rb file to apt cookbook
* [COOK-4102] - Only index created repository
v2.3.4
------
No change. Version bump for toolchain sanity
v2.3.2
------
- [COOK-3905] apt-get-update-periodic: configuration for the update period
- Updating style for rubocops
- Updating test-kitchen harness
v2.3.0
------
### Bug
- **[COOK-3812](https://tickets.chef.io/browse/COOK-3812)** - Add a way to bypass the apt existence check
### Improvement
- **[COOK-3567](https://tickets.chef.io/browse/COOK-3567)** - Allow users to bypass apt-cache via attributes
v2.2.1
------
### Improvement
- **[COOK-664](https://tickets.chef.io/browse/COOK-664)** - Check platform before running apt-specific commands
v2.2.0
------
### Bug
- **[COOK-3707](https://tickets.chef.io/browse/COOK-3707)** - multiple nics confuse apt::cacher-client
v2.1.2
------
### Improvement
- **[COOK-3551](https://tickets.chef.io/browse/COOK-3551)** - Allow user to set up a trusted APT repository
v2.1.1
------
### Bug
- **[COOK-1856](https://tickets.chef.io/browse/COOK-1856)** - Match GPG keys without case sensitivity
v2.1.0
------
- [COOK-3426]: cacher-ng fails with restrict_environment set to true
- [COOK-2859]: cacher-client executes out of order
- [COOK-3052]: Long GPG keys are downloaded on every run
- [COOK-1856]: apt cookbook should match keys without case sensitivity
- [COOK-3255]: Attribute name incorrect in README
- [COOK-3225]: Call use_inline_resources only if defined
- [COOK-3386]: Cache dir for apt-cacher-ng
- [COOK-3291]: apt_repository: enable usage of a keyserver on port 80
- Greatly expanded test coverage with ChefSpec and Test-Kitchen
v2.0.0
------
### Bug
- [COOK-2258]: apt: LWRP results in error under why-run mode in apt 1.9.0 cookbook
v1.10.0
-------
### Improvement
- [COOK-2885]: Improvements for apt cache server search
### Bug
- [COOK-2441]: Apt recipe broken in new chef version
- [COOK-2660]: Create Debian 6.0 "squeeze" specific template for
apt-cacher-ng
v1.9.2
------
- [COOK-2631] - Create Ubuntu 10.04 specific template for apt-cacher-ng
v1.9.0
------
- [COOK-2185] - Proxy for apt-key
- [COOK-2338] - Support pinning by glob() or regexp
v1.8.4
------
- [COOK-2171] - Update README to clarify required Chef version: 10.18.0
or higher.
v1.8.2
------
- [COOK-2112] - need [] around "arch" in sources.list entries
- [COOK-2171] - fixes a regression in the notification
v1.8.0
------
- [COOK-2143] - Allow for a custom cacher-ng port
- [COOK-2171] - On `apt_repository.run_action(:add)` the source file
is not created.
- [COOK-2184] - apt::cacher-ng, use `cacher_port` attribute in
acng.conf
v1.7.0
------
- [COOK-2082] - add "arch" parameter to apt_repository LWRP
v1.6.0
------
- [COOK-1893] - `apt_preference` use "`package_name`" resource instead of "name"
- [COOK-1894] - change filename for sources.list.d files
- [COOK-1914] - Wrong dir permissions for /etc/apt/preferences.d/
- [COOK-1942] - README.md has wrong name for the keyserver attribute
- [COOK-2019] - create 01proxy before any other apt-get updates get executed
v1.5.2
------
- [COOK-1682] - use template instead of file resource in apt::cacher-client
- [COOK-1875] - cacher-client should be Environment-aware
V1.5.0
------
- [COOK-1500] - Avoid triggering apt-get update
- [COOK-1548] - Add execute commands for autoclean and autoremove
- [COOK-1591] - Setting up the apt proxy should leave https
connections direct
- [COOK-1596] - execute[apt-get-update-periodic] never runs
- [COOK-1762] - create /etc/apt/preferences.d directory
- [COOK-1776] - apt key check isn't idempotent
v1.4.8
------
* Adds test-kitchen support
- [COOK-1435] - repository lwrp is not idempotent with http key
v1.4.6
------
- [COOK-1530] - apt_repository isn't aware of update-success-stamp
file (also reverts COOK-1382 patch).
v1.4.4
------
- [COOK-1229] - Allow cacher IP to be set manually in non-Chef Solo
environments
- [COOK-1530] - Immediately update apt-cache when sources.list file is dropped off
v1.4.2
------
- [COOK-1155] - LWRP for apt pinning
v1.4.0
------
- [COOK-889] - overwrite existing repo source files
- [COOK-921] - optionally use cookbook\_file or remote\_file for key
- [COOK-1032] - fixes problem with apt repository key installation

View File

@ -0,0 +1,29 @@
If you would like to contribute, please open a ticket in JIRA:
* http://tickets.chef.io
Create the ticket in the COOK project and use the cookbook name as the
component.
For all code contributions, we ask that contributors sign a
contributor license agreement (CLA). Instructions may be found here:
* http://wiki.chef.io/display/chef/How+to+Contribute
When contributing changes to individual cookbooks, please do not
modify the version number in the metadata.rb. Also please do not
update the CHANGELOG.md for a new version. Not all changes to a
cookbook may be merged and released in the same versions. Chef Software will
handle the version updates during the release process. You are welcome
to correct typos or otherwise make updates to documentation in the
README.
If a contribution adds new platforms or platform versions, indicate
such in the body of the commit message(s), and update the relevant
COOK ticket. When writing commit messages, it is helpful for others if
you indicate the COOK ticket. For example:
git commit -m '[COOK-1041] Updated pool resource to correctly delete.'
In the ticket itself, it is also helpful if you include log output of
a successful Chef run, but this is not absolutely required.

View File

@ -0,0 +1,37 @@
# Gemfile for the apt cookbook's development and test tooling.
# Gem groups mirror the CI stages; use `bundle install --without <group>`
# to skip toolchains you do not need locally.
source 'https://rubygems.org'

# Static analysis / style checking.
group :lint do
  gem 'foodcritic', '~> 3.0'
  gem 'rubocop', '~> 0.23'
  gem 'rainbow', '< 2.0'
end

# Unit tests (ChefSpec) and cookbook dependency resolution (Berkshelf).
group :unit do
  gem 'berkshelf', '~> 3.0.0.beta6'
  gem 'chefspec', '~> 4.0'
end

# Shared Test Kitchen core; driver-specific gems live in the groups below.
group :kitchen_common do
  gem 'test-kitchen', '~> 1.2'
end

# Local integration testing via Vagrant (excluded on CI; see .travis.yml).
group :kitchen_vagrant do
  gem 'kitchen-vagrant', '~> 0.11'
end

# Cloud drivers for integration testing.
group :kitchen_cloud do
  gem 'kitchen-digitalocean'
  gem 'kitchen-ec2'
end

# Developer conveniences: Guard watchers and desktop notifications.
group :development do
  gem 'ruby_gntp'
  gem 'growl'
  gem 'rb-fsevent'
  gem 'guard', '~> 2.4'
  gem 'guard-kitchen'
  gem 'guard-foodcritic'
  gem 'guard-rspec'
  gem 'guard-rubocop'
  gem 'rake'
end

View File

@ -0,0 +1,35 @@
# A sample Guardfile
# More info at https://github.com/guard/guard#readme

# Kitchen converges are expensive, so this watcher ships disabled;
# uncomment to re-converge automatically on cookbook changes.
# guard 'kitchen' do
#   watch(%r{test/.+})
#   watch(%r{^recipes/(.+)\.rb$})
#   watch(%r{^attributes/(.+)\.rb$})
#   watch(%r{^files/(.+)})
#   watch(%r{^templates/(.+)})
#   watch(%r{^providers/(.+)\.rb})
#   watch(%r{^resources/(.+)\.rb})
# end

# Re-run Foodcritic lint when any cookbook source file changes.
guard 'foodcritic', cookbook_paths: '.', all_on_start: false do
  watch(%r{attributes/.+\.rb$})
  watch(%r{providers/.+\.rb$})
  watch(%r{recipes/.+\.rb$})
  watch(%r{resources/.+\.rb$})
  watch('metadata.rb')
end

# Re-run RuboCop style checks over the same set of files.
guard 'rubocop', all_on_start: false do
  watch(%r{attributes/.+\.rb$})
  watch(%r{providers/.+\.rb$})
  watch(%r{recipes/.+\.rb$})
  watch(%r{resources/.+\.rb$})
  watch('metadata.rb')
end

# Re-run the matching ChefSpec examples when a library, spec, or recipe changes.
guard :rspec, cmd: 'bundle exec rspec', all_on_start: false, notification: false do
  watch(%r{^libraries/(.+)\.rb$})
  watch(%r{^spec/(.+)_spec\.rb$})
  watch(%r{^(recipes)/(.+)\.rb$}) { |m| "spec/#{m[1]}_spec.rb" }
  watch('spec/spec_helper.rb') { 'spec' }
end

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,281 @@
apt Cookbook
============
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/chef-cookbooks/apt?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Cookbook Version](https://img.shields.io/cookbook/v/apt.svg)][cookbook]
[![Build Status](https://img.shields.io/travis/opscode-cookbooks/apt.svg)][travis]
[cookbook]: https://community.chef.io/cookbooks/apt
[travis]: https://travis-ci.org/opscode-cookbooks/apt
This cookbook includes recipes to execute apt-get update to ensure the local APT package cache is up to date. There are recipes for managing the apt-cacher-ng caching proxy and proxy clients. It also includes a LWRP for managing APT repositories in /etc/apt/sources.list.d as well as an LWRP for pinning packages via /etc/apt/preferences.d.
Requirements
------------
**Version 2.0.0+ of this cookbook requires Chef 11.0.0 or later**. If your Chef version is earlier than 11.0.0, use version 1.10.0 of this cookbook.
Version 1.8.2 to 1.10.0 of this cookbook requires **Chef 10.16.4** or later.
If your Chef version is earlier than 10.16.4, use version 1.7.0 of this cookbook.
### Platform
Please refer to the [TESTING file](TESTING.md) to see the currently (and passing) tested platforms. The release was tested on:
* Ubuntu 10.04
* Ubuntu 12.04
* Ubuntu 13.04
* Debian 7.1
* Debian 6.0 (verified with manual testing)
May work with or without modification on other Debian derivatives.
-------
### default
This recipe manually updates the timestamp file used to only run `apt-get update` if the cache is more than one day old.
This recipe should appear first in the run list of Debian or Ubuntu nodes to ensure that the package cache is up to date before managing any `package` resources with Chef.
This recipe also sets up a local cache directory for preseeding packages.
**Including the default recipe on a node that does not support apt (such as Windows) results in a noop.**
### cacher-client
Configures the node to use the `apt-cacher-ng` server as a client.
#### Bypassing the cache
Occasionally you may come across repositories that do not play nicely when the node is using an `apt-cacher-ng` server. You can configure `cacher-client` to bypass the server and connect directly to the repository with the `cache_bypass` attribute.
To do this, you need to override the `cache_bypass` attribute with an array of repositories, with each array key as the repository URL and value as the protocol to use:
```json
{
...,
'apt': {
...,
'cache_bypass': {
URL: PROTOCOL
}
}
}
```
For example, to prevent caching and directly connect to the repository at `download.oracle.com` via http:
```json
{
'apt': {
'cache_bypass': {
'download.oracle.com': 'http'
}
}
}
```
### cacher-ng
Installs the `apt-cacher-ng` package and service so the system can provide APT caching. You can check the usage report at http://{hostname}:3142/acng-report.html.
If you wish to help the `cacher-ng` recipe seed itself, you must now explicitly include the `cacher-client` recipe in your run list **after** `cacher-ng` or you will block your ability to install any packages (ie. `apt-cacher-ng`).
### unattended-upgrades
Installs and configures the `unattended-upgrades` package to provide automatic package updates. This can be configured to upgrade all packages or to just install security updates by setting `['apt']['unattended_upgrades']['allowed_origins']`.
To pull just security updates, you'd set `allowed_origins` to something like `["Ubuntu trusty-security"]` (for Ubuntu trusty) or `["Debian wheezy-security"]` (for Debian wheezy).
Attributes
----------
### General
* `['apt']['compile_time_update']` - force the default recipe to run `apt-get update` at compile time.
* `['apt']['periodic_update_min_delay']` - minimum delay (in seconds) between two actual executions of `apt-get update` by the `execute[apt-get-update-periodic]` resource, default is '86400' (24 hours)
### Caching
* `['apt']['cacher_ipaddress']` - use a cacher server (or standard proxy server) not available via search
* `['apt']['cacher_interface']` - interface to connect to the cacher-ng service, no default.
* `['apt']['cacher_port']` - port for the cacher-ng service (either client or server), default is '3142'
* `['apt']['cacher_ssl_support']` - indicates whether the cacher supports upstream SSL servers, default is 'false'
* `['apt']['cacher_dir']` - directory used by cacher-ng service, default is '/var/cache/apt-cacher-ng'
* `['apt']['cacher-client']['restrict_environment']` - restrict your node to using the `apt-cacher-ng` server in your Environment, default is 'false'
* `['apt']['compiletime']` - force the `cacher-client` recipe to run before other recipes. It forces apt to use the proxy before other recipes run. Useful if your nodes have limited access to public apt repositories. This is overridden if the `cacher-ng` recipe is in your run list. Default is 'false'
* `['apt']['cache_bypass']` - array of URLs to bypass the cache. Accepts the URL and protocol to fetch directly from the remote repository and not attempt to cache
### Unattended Upgrades
* `['apt']['unattended_upgrades']['enable']` - enables unattended upgrades, default is false
* `['apt']['unattended_upgrades']['update_package_lists']` automatically update package list (`apt-get update`) daily, default is true
* `['apt']['unattended_upgrades']['allowed_origins']` — array of allowed apt origins from which to pull automatic upgrades, defaults to a guess at the system's main origin and should almost always be overridden
* `['apt']['unattended_upgrades']['package_blacklist']` an array of package which should never be automatically upgraded, defaults to none
* `['apt']['unattended_upgrades']['auto_fix_interrupted_dpkg']` attempts to repair dpkg state with `dpkg --force-confold --configure -a` if it exits uncleanly, defaults to false (contrary to the unattended-upgrades default)
* `['apt']['unattended_upgrades']['minimal_steps']` Split the upgrade into the smallest possible chunks. This makes the upgrade a bit slower but it has the benefit that shutdown while a upgrade is running is possible (with a small delay). Defaults to false.
* `['apt']['unattended_upgrades']['install_on_shutdown']` — Install upgrades when the machine is shutting down instead of doing it in the background while the machine is running. This will (obviously) make shutdown slower. Defaults to false.
* `['apt']['unattended_upgrades']['mail']` — Send email to this address for problems or packages upgrades. Defaults to no email.
* `['apt']['unattended_upgrades']['mail_only_on_error']` If set, email will only be set on upgrade errors. Otherwise, an email will be sent after each upgrade. Defaults to true.
* `['apt']['unattended_upgrades']['remove_unused_dependencies']` Do automatic removal of new unused dependencies after the upgrade. Defaults to false.
* `['apt']['unattended_upgrades']['automatic_reboot']` — Automatically reboots *without confirmation* if a restart is required after the upgrade. Defaults to false.
* `['apt']['unattended_upgrades']['dl_limit']` Limits the bandwidth used by apt to download packages. Value given as an integer in kb/sec. Defaults to nil (no limit).
Libraries
---------
There is an `interface_ipaddress` method that returns the IP address for a particular host and interface, used by the `cacher-client` recipe. To enable it on the server use the `['apt']['cacher_interface']` attribute.
Resources/Providers
-------------------
### `apt_repository`
This LWRP provides an easy way to manage additional APT repositories. Adding a new repository will notify running the `execute[apt-get-update]` resource immediately.
#### Actions
- :add: creates a repository file and builds the repository listing (default)
- :remove: removes the repository file
#### Attribute Parameters
- repo_name: name attribute. The name of the channel to discover
- uri: the base of the Debian distribution
- distribution: this is usually your release's codename...ie something like `karmic`, `lucid` or `maverick`
- components: package groupings... when in doubt use `main`
- arch: constrain package to a particular arch like `i386`, `amd64` or even `armhf` or `powerpc`. Defaults to nil.
- trusted: treat all packages from this repository as authenticated regardless of signature
- deb_src: whether or not to add the repository as a source repo as well - value can be `true` or `false`, default `false`.
- keyserver: the GPG keyserver where the key for the repo should be retrieved
- key: if a `keyserver` is provided, this is assumed to be the fingerprint, otherwise it can be either the URI to the GPG key for the repo, or a cookbook_file.
- key_proxy: if set, pass the specified proxy via `http-proxy=` to GPG.
- cookbook: if key should be a cookbook_file, specify a cookbook where the key is located for files/default. Defaults to nil, so it will use the cookbook where the resource is used.
#### Examples
Add the Zenoss repo:
```ruby
apt_repository 'zenoss' do
uri 'http://dev.zenoss.org/deb'
components ['main', 'stable']
end
```
Add the Nginx PPA, autodetect the key and repository url:
```ruby
apt_repository 'nginx-php' do
uri 'ppa:nginx/stable'
distribution node['lsb']['codename']
end
```
Add the JuJu PPA, grab the key from the keyserver, and add source repo:
```ruby
apt_repository 'juju' do
uri 'http://ppa.launchpad.net/juju/stable/ubuntu'
components ['main']
distribution 'trusty'
key 'C8068B11'
keyserver 'keyserver.ubuntu.com'
action :add
deb_src true
end
```
Add the Cloudera Repo of CDH4 packages for Ubuntu 12.04 on AMD64:
```ruby
apt_repository 'cloudera' do
uri 'http://archive.cloudera.com/cdh4/ubuntu/precise/amd64/cdh'
arch 'amd64'
distribution 'precise-cdh4'
components ['contrib']
key 'http://archive.cloudera.com/debian/archive.key'
end
```
Remove Zenoss repo:
```ruby
apt_repository 'zenoss' do
action :remove
end
```
### `apt_preference`
This LWRP provides an easy way to pin packages in /etc/apt/preferences.d. Although apt-pinning is quite helpful from time to time please note that Debian does not encourage its use without thorough consideration.
Further information regarding apt-pinning is available via http://wiki.debian.org/AptPreferences.
#### Actions
- :add: creates a preferences file under /etc/apt/preferences.d
- :remove: Removes the file, therefore unpin the package
#### Attribute Parameters
- package_name: name attribute. The name of the package
- glob: Pin by glob() expression or regexp surrounded by /.
- pin: The package version/repository to pin
- pin_priority: The pinning priority aka "the highest package version wins"
#### Examples
Pin libmysqlclient16 to version 5.1.49-3:
```ruby
apt_preference 'libmysqlclient16' do
pin 'version 5.1.49-3'
pin_priority '700'
end
```
Unpin libmysqlclient16:
```ruby
apt_preference 'libmysqlclient16' do
action :remove
end
```
Pin all packages from dotdeb.org:
```ruby
apt_preference 'dotdeb' do
glob '*'
pin 'origin packages.dotdeb.org'
pin_priority '700'
end
```
Usage
-----
Put `recipe[apt]` first in the run list. If you have other recipes that you want to use to configure how apt behaves, like new sources, notify the execute resource to run, e.g.:
```ruby
template '/etc/apt/sources.list.d/my_apt_sources.list' do
notifies :run, 'execute[apt-get update]', :immediately
end
```
The above will run during execution phase since it is a normal template resource, and should appear before other package resources that need the sources in the template.
Put `recipe[apt::cacher-ng]` in the run_list for a server to provide APT caching and add `recipe[apt::cacher-client]` on the rest of the Debian-based nodes to take advantage of the caching server.
If you want to cleanup unused packages, there is also the `apt-get autoclean` and `apt-get autoremove` resources provided for automated cleanup.
License & Authors
-----------------
- Author:: Joshua Timberman (joshua@chef.io)
- Author:: Matt Ray (matt@chef.io)
- Author:: Seth Chisamore (schisamo@chef.io)
```text
Copyright 2009-2013, Chef Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```

View File

@ -0,0 +1,59 @@
require 'rspec/core/rake_task'
require 'rubocop/rake_task'
require 'foodcritic'
require 'kitchen'

# Style checks: Rubocop for Ruby style, Foodcritic for Chef lint rules.
namespace :style do
  desc 'Run Ruby style checks'
  RuboCop::RakeTask.new(:ruby)

  desc 'Run Chef style checks'
  FoodCritic::Rake::LintTask.new(:chef) do |lint|
    lint.options = {
      fail_tags: ['any'],  # any matched tag fails the run
      tags: ['~FC005']     # FC005 is excluded
    }
  end
end

desc 'Run all style checks'
task style: ['style:chef', 'style:ruby']

# Unit tests: ChefSpec examples executed through RSpec.
desc 'Run ChefSpec examples'
RSpec::Core::RakeTask.new(:spec)

# Integration tests driven by Test Kitchen.
namespace :integration do
  desc 'Run Test Kitchen with Vagrant'
  task :vagrant do
    Kitchen.logger = Kitchen.default_file_logger
    Kitchen::Config.new.instances.each { |instance| instance.test(:always) }
  end

  desc 'Run Test Kitchen with cloud plugins'
  task :cloud do
    # Cloud converges are skipped for Travis pull requests, since the
    # encrypted cloud credentials are not available there.
    pull_request = ENV['TRAVIS'] == 'true' && ENV['TRAVIS_PULL_REQUEST'] != 'false'
    unless pull_request
      Kitchen.logger = Kitchen.default_file_logger
      @loader = Kitchen::Loader::YAML.new(project_config: './.kitchen.cloud.yml')
      config = Kitchen::Config.new(loader: @loader)
      config.instances.each { |instance| instance.test(:always) }
    end
  end
end

desc 'Run all tests on Travis'
task travis: ['style', 'spec', 'integration:cloud']

# Default for local development.
task default: ['style', 'spec', 'integration:vagrant']

View File

@ -0,0 +1,187 @@
TESTING doc
========================
Bundler
-------
A ruby environment with Bundler installed is a prerequisite for using
the testing harness shipped with this cookbook. At the time of this
writing, it works with Ruby 2.0 and Bundler 1.5.3. All programs
involved, with the exception of Vagrant, can be installed by cd'ing
into the parent directory of this cookbook and running "bundle install"
Rakefile
--------
The Rakefile ships with a number of tasks, each of which can be run
individually or in groups. Typing "rake" by itself will perform style
checks with Rubocop and Foodcritic, ChefSpec with rspec, and
integration with Test Kitchen using the Vagrant driver by
default. Alternatively, integration tests can be run with Test Kitchen
cloud drivers.
```
$ rake -T
rake integration:cloud # Run Test Kitchen with cloud plugins
rake integration:vagrant # Run Test Kitchen with Vagrant
rake spec # Run ChefSpec examples
rake style # Run all style checks
rake style:chef # Lint Chef cookbooks
rake style:ruby # Run Ruby style checks
rake travis # Run all tests on Travis
```
Style Testing
-------------
Ruby style tests can be performed by Rubocop by issuing either
```
bundle exec rubocop
```
or
```
rake style:ruby
```
Chef style tests can be performed with Foodcritic by issuing either
```
bundle exec foodcritic
```
or
```
rake style:chef
```
Spec Testing
-------------
Unit testing is done by running Rspec examples. Rspec will test any
libraries, then test recipes using ChefSpec. This works by compiling a
recipe (but not converging it), and allowing the user to make
assertions about the resource_collection.
Integration Testing
-------------------
Integration testing is performed by Test Kitchen. Test Kitchen will
use either the Vagrant driver or various cloud drivers to instantiate
machines and apply cookbooks. After a successful converge, tests are
uploaded and run out of band of Chef. Tests should be designed to
ensure that a recipe has accomplished its goal.
Integration Testing using Vagrant
---------------------------------
Integration tests can be performed on a local workstation using
Virtualbox or VMWare. Detailed instructions for setting this up can be
found at the [Bento](https://github.com/chef/bento) project web site.
Integration tests using Vagrant can be performed with either
```
bundle exec kitchen test
```
or
```
rake integration:vagrant
```
Integration Testing using Cloud providers
-----------------------------------------
Integration tests can be performed on cloud providers using
Test Kitchen plugins. This cookbook ships a ```.kitchen.cloud.yml```
that references environmental variables present in the shell that
```kitchen test``` is run from. These usually contain authentication
tokens for driving IaaS APIs, as well as the paths to ssh private keys
needed for Test Kitchen to log into them after they've been created.
Examples of environment variables being set in ```~/.bash_profile```:
```
# digital_ocean
export DIGITAL_OCEAN_CLIENT_ID='your_bits_here'
export DIGITAL_OCEAN_API_KEY='your_bits_here'
export DIGITAL_OCEAN_SSH_KEY_IDS='your_bits_here'
# aws
export AWS_ACCESS_KEY_ID='your_bits_here'
export AWS_SECRET_ACCESS_KEY='your_bits_here'
export AWS_KEYPAIR_NAME='your_bits_here'
# joyent
export SDC_CLI_ACCOUNT='your_bits_here'
export SDC_CLI_IDENTITY='your_bits_here'
export SDC_CLI_KEY_ID='your_bits_here'
```
Integration tests using cloud drivers can be performed with either
```
export KITCHEN_YAML=.kitchen.cloud.yml
bundle exec kitchen test
```
or
```
rake integration:cloud
```
Digital Ocean Hint
------------------
At the time of this writing, you cannot find the numerical values
needed for your SSH_KEY_IDS from the GUI. Instead, you will need to
access the API from the command line.
curl -L 'https://api.digitalocean.com/ssh_keys/?client_id=your_bits_here&api_key=your_bits_here'
Words about .travis.yml
-----------------------
In order for Travis to perform integration tests on public cloud
providers, two major things need to happen. First, the environment
variables referenced by ```.kitchen.cloud.yml``` need to be made
available. Second, the private half of the ssh keys needed to log into
machines need to be dropped off on the machine.
The first part is straight forward. The travis gem can encrypt
environment variables against the public key on the Travis repository
and add them to the .travis.yml.
```
gem install travis
travis encrypt AWS_ACCESS_KEY_ID='your_bits_here' --add
travis encrypt AWS_SECRET_ACCESS_KEY='your_bits_here' --add
travis encrypt AWS_KEYPAIR_NAME='your_bits_here' --add
travis encrypt EC2_SSH_KEY_PATH='~/.ssh/id_ec2.pem' --add
travis encrypt DIGITAL_OCEAN_CLIENT_ID='your_bits_here' --add
travis encrypt DIGITAL_OCEAN_API_KEY='your_bits_here' --add
travis encrypt DIGITAL_OCEAN_SSH_KEY_IDS='your_bits_here' --add
travis encrypt DIGITAL_OCEAN_SSH_KEY_PATH='~/.ssh/id_do.pem' --add
```
The second part is a little more complicated. Travis ENV variables are
restricted to 90 bytes, and will not fit an entire SSH key. This can
be worked around by breaking them up into 90 byte chunks, stashing
them into ENV variables, then digging them out in the
```before_install``` section of .travis.yml
Here is an AWK script to do the encoding.
```
base64 ~/.ssh/travisci_cook_digitalocean.pem | \
awk '{
j=0;
for( i=1; i<length; i=i+90 ) {
system("travis encrypt DO_KEY_CHUNK_" j "=" substr($0, i, 90) " --add");
j++;
}
}'
base64 ~/.ssh/travisci_cook_ec2.pem | \
awk '{
j=0;
for( i=1; i<length; i=i+90 ) {
system("travis encrypt EC2_KEY_CHUNK_" j "=" substr($0, i, 90)" --add");
j++;
}
}'
```
Then in .travis.yml:
```
before_install:
- echo -n $DO_KEY_CHUNK_{0..30} >> ~/.ssh/id_do.base64
- cat ~/.ssh/id_do.base64 | tr -d ' ' | base64 --decode > ~/.ssh/id_do.pem
- echo -n $EC2_KEY_CHUNK_{0..30} >> ~/.ssh/id_ec2.base64
- cat ~/.ssh/id_ec2.base64 | tr -d ' ' | base64 --decode > ~/.ssh/id_ec2.pem
```

View File

@ -0,0 +1,48 @@
#
# Cookbook Name:: apt
# Attributes:: default
#
# Copyright 2009-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Default attributes for the apt cookbook. See README.md for the
# documented meaning of each attribute.

# apt-cacher-ng client/server settings.
default['apt']['cacher-client']['restrict_environment'] = false
default['apt']['cacher_dir'] = '/var/cache/apt-cacher-ng'
default['apt']['cacher_interface'] = nil
default['apt']['cacher_port'] = 3142
default['apt']['cacher_ssl_support'] = false
default['apt']['caching_server'] = false
default['apt']['compiletime'] = false

# General apt behaviour.
default['apt']['compile_time_update'] = false
default['apt']['key_proxy'] = ''
default['apt']['cache_bypass'] = {}
default['apt']['periodic_update_min_delay'] = 86_400  # seconds (24 hours)
default['apt']['launchpad_api_version'] = '1.0'

# unattended-upgrades configuration.
default['apt']['unattended_upgrades']['enable'] = false
default['apt']['unattended_upgrades']['update_package_lists'] = true
# this needs a good default
codename = node.attribute?('lsb') ? node['lsb']['codename'] : 'notlinux'
default['apt']['unattended_upgrades']['allowed_origins'] = [
  "#{node['platform'].capitalize} #{codename}"
]
default['apt']['unattended_upgrades']['package_blacklist'] = []
default['apt']['unattended_upgrades']['auto_fix_interrupted_dpkg'] = false
default['apt']['unattended_upgrades']['minimal_steps'] = false
default['apt']['unattended_upgrades']['install_on_shutdown'] = false
default['apt']['unattended_upgrades']['mail'] = nil
default['apt']['unattended_upgrades']['mail_only_on_error'] = true
default['apt']['unattended_upgrades']['remove_unused_dependencies'] = false
default['apt']['unattended_upgrades']['automatic_reboot'] = false
default['apt']['unattended_upgrades']['automatic_reboot_time'] = 'now'
default['apt']['unattended_upgrades']['dl_limit'] = nil

View File

@ -0,0 +1 @@
APT::Update::Post-Invoke-Success {"touch /var/lib/apt/periodic/update-success-stamp 2>/dev/null || true";};

View File

@ -0,0 +1,50 @@
[DEFAULT]
;; All times are in seconds, but you can add a suffix
;; for minutes(m), hours(h) or days(d)
;; commented out address so apt-proxy will listen on all IPs
;; address = 127.0.0.1
port = 9999
cache_dir = /var/cache/apt-proxy
;; Control files (Packages/Sources/Contents) refresh rate
min_refresh_delay = 1s
complete_clientless_downloads = 1
;; Debugging settings.
debug = all:4 db:0
time = 30
passive_ftp = on
;;--------------------------------------------------------------
;; Cache housekeeping
cleanup_freq = 1d
max_age = 120d
max_versions = 3
;;---------------------------------------------------------------
;; Backend servers
;;
;; Place each server in its own [section]
[ubuntu]
; Ubuntu archive
backends =
http://us.archive.ubuntu.com/ubuntu
[ubuntu-security]
; Ubuntu security updates
backends = http://security.ubuntu.com/ubuntu
[debian]
;; Backend servers, in order of preference
backends =
http://debian.osuosl.org/debian/
[security]
;; Debian security archive
backends =
http://security.debian.org/debian-security
http://ftp2.de.debian.org/debian-security

View File

@ -0,0 +1,49 @@
#
# Cookbook Name:: apt
# Library:: helpers
#
# Copyright 2013 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Apt
  # Helper predicates shared by recipes, resources, and providers.
  module Helpers
    # Determines if apt is installed on a system.
    #
    # @return [Boolean] true when an apt-get executable can be located
    def apt_installed?
      !which('apt-get').nil?
    end

    # Finds a command in $PATH, falling back to common bin/sbin dirs.
    #
    # Fixes over the previous version: does not mutate the global
    # ENV['PATH'] when it is unset, and rejects directories (Ruby's
    # File.executable? returns true for directories, so a directory
    # named like the command used to be reported as the executable).
    #
    # @param cmd [String] bare command name to locate
    # @return [String, nil] path to the executable, or nil if not found
    def which(cmd)
      search_dirs = (ENV['PATH'] || '').split(::File::PATH_SEPARATOR)
      search_dirs += %w(/bin /usr/bin /sbin /usr/sbin)
      search_dirs.each do |dir|
        candidate = File.join(dir, cmd)
        return candidate if File.executable?(candidate) && !File.directory?(candidate)
      end
      nil
    end
  end
end
# Mix the helpers into recipes, resources, and providers so that
# apt_installed? / which are available everywhere in the cookbook.
Chef::Recipe.send(:include, ::Apt::Helpers)
Chef::Resource.send(:include, ::Apt::Helpers)
Chef::Provider.send(:include, ::Apt::Helpers)

View File

@ -0,0 +1,17 @@
if defined?(ChefSpec)
  # Custom ChefSpec matchers so cookbook specs can assert on the actions
  # of the apt_preference and apt_repository LWRPs, e.g.
  #   expect(chef_run).to add_apt_repository('zenoss')

  def add_apt_preference(resource_name)
    ChefSpec::Matchers::ResourceMatcher.new(
      :apt_preference, :add, resource_name)
  end

  def remove_apt_preference(resource_name)
    ChefSpec::Matchers::ResourceMatcher.new(
      :apt_preference, :remove, resource_name)
  end

  def add_apt_repository(resource_name)
    ChefSpec::Matchers::ResourceMatcher.new(
      :apt_repository, :add, resource_name)
  end

  def remove_apt_repository(resource_name)
    ChefSpec::Matchers::ResourceMatcher.new(
      :apt_repository, :remove, resource_name)
  end
end

View File

@ -0,0 +1,31 @@
#
# Cookbook Name:: apt
# library:: network
#
# Copyright 2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module ::Apt
  # Returns the IPv4 address for +interface+ on +host+, or the host's
  # default ipaddress when no interface is given.
  #
  # @param host [Chef::Node] node (or node-like) object to inspect
  # @param interface [String, nil] interface name, e.g. 'eth0'
  # @return [String, nil] the first 'inet'-family address of the
  #   interface, host.ipaddress when interface is nil, or nil when the
  #   interface has no IPv4 address. (The previous implementation fell
  #   through Enumerable#select and leaked a truthy empty Hash in that
  #   last case.)
  def interface_ipaddress(host, interface)
    return host.ipaddress unless interface

    addresses = host['network']['interfaces'][interface]['addresses']
    addresses.each do |ip, data|
      return ip if data['family'].eql?('inet')
    end
    nil
  end
end

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,38 @@
# Cookbook metadata consumed by Chef and the Supermarket.
name 'apt'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Configures apt and apt services and LWRPs for managing apt repositories and preferences'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.7.0'

# Recipes shipped with this cookbook.
recipe 'apt', 'Runs apt-get update during compile phase and sets up preseed directories'
recipe 'apt::cacher-ng', 'Set up an apt-cacher-ng caching proxy'
recipe 'apt::cacher-client', 'Client for the apt::cacher-ng caching proxy'

# Supported platforms.
%w{ ubuntu debian }.each do |os|
  supports os
end

# Attributes surfaced for documentation/UI purposes; see README.md.
attribute 'apt/cacher-client/restrict_environment',
  :description => 'Whether to restrict the search for the caching server to the same environment as this node',
  :default => 'false'

attribute 'apt/cacher_port',
  :description => 'Default listen port for the caching server',
  :default => '3142'

attribute 'apt/cacher_ssl_support',
  :description => 'The caching server supports upstream SSL servers via CONNECT',
  :default => 'false'

attribute 'apt/cacher_interface',
  :description => 'Default listen interface for the caching server',
  :default => nil

attribute 'apt/key_proxy',
  :description => 'Passed as the proxy passed to GPG for the apt_repository resource',
  :default => ''

attribute 'apt/caching_server',
  :description => 'Set this to true if the node is a caching server',
  :default => 'false'

View File

@ -0,0 +1,69 @@
#
# Cookbook Name:: apt
# Provider:: preference
#
# Copyright 2010-2011, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Enable inline resource evaluation where supported (Chef >= 11);
# a no-op on older releases.
use_inline_resources if defined?(use_inline_resources)

# This provider supports why-run mode.
def whyrun_supported?
  true
end

# Build preferences.d file contents.
#
# @param package_name [String] package name or glob to pin
# @param pin [String] pin target, e.g. 'version 5.1.49-3'
# @param pin_priority [String, Integer] apt pin priority
# @return [String] the rendered apt preference stanza
def build_pref(package_name, pin, pin_priority)
  "Package: #{package_name}\nPin: #{pin}\nPin-Priority: #{pin_priority}\n"
end

action :add do
  # Glob takes precedence over the plain package name when both are set.
  preference = build_pref(
    new_resource.glob || new_resource.package_name,
    new_resource.pin,
    new_resource.pin_priority
  )

  directory '/etc/apt/preferences.d' do
    owner 'root'
    group 'root'
    mode 00755
    recursive true
    action :create
  end

  # Remove a legacy preference file written without the .pref extension;
  # it is superseded by the "#{name}.pref" file created below.
  file "/etc/apt/preferences.d/#{new_resource.name}" do
    action :delete
    if ::File.exist?("/etc/apt/preferences.d/#{new_resource.name}")
      Chef::Log.warn "Replacing #{new_resource.name} with #{new_resource.name}.pref in /etc/apt/preferences.d/"
    end
  end

  file "/etc/apt/preferences.d/#{new_resource.name}.pref" do
    owner 'root'
    group 'root'
    mode 00644
    content preference
    action :create
  end
end

action :remove do
  # Deleting the .pref file un-pins the package.
  if ::File.exist?("/etc/apt/preferences.d/#{new_resource.name}.pref")
    Chef::Log.info "Un-pinning #{new_resource.name} from /etc/apt/preferences.d/"
    file "/etc/apt/preferences.d/#{new_resource.name}.pref" do
      action :delete
    end
  end
end

View File

@ -0,0 +1,203 @@
#
# Cookbook Name:: apt
# Provider:: repository
#
# Copyright 2010-2011, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Enable inline resource evaluation where supported (Chef >= 11);
# a no-op on older releases.
use_inline_resources if defined?(use_inline_resources)

# This provider supports why-run mode.
def whyrun_supported?
  true
end
# install apt key from keyserver
#
# Imports +key+ (a key id / fingerprint suffix) from +keyserver+ via
# apt-key, routing through node['apt']['key_proxy'] when one is set.
# Skipped when a locally installed key fingerprint already ends with
# the requested id.
def install_key_from_keyserver(key, keyserver)
  execute "install-key #{key}" do
    if !node['apt']['key_proxy'].empty?
      # hkp on port 80 so plain HTTP proxies can relay keyserver traffic
      command "apt-key adv --keyserver-options http-proxy=#{node['apt']['key_proxy']} --keyserver hkp://#{keyserver}:80 --recv #{key}"
    else
      command "apt-key adv --keyserver #{keyserver} --recv #{key}"
    end
    action :run
    not_if do
      extract_fingerprints_from_cmd('apt-key finger').any? do |fingerprint|
        fingerprint.end_with?(key.upcase)
      end
    end
  end
end
# run command and extract gpg ids
#
# Runs +cmd+ (e.g. 'apt-key finger') under LANG=en_US and collects every
# GPG key fingerprint found in its output, with internal spaces removed.
#
# @param cmd [String] command line to execute
# @return [Array<String>] fingerprint strings, possibly empty
def extract_fingerprints_from_cmd(cmd)
  shellout = Mixlib::ShellOut.new(cmd, env: { 'LANG' => 'en_US' })
  shellout.run_command
  fingerprints = []
  shellout.stdout.each_line do |line|
    matched = line.match(/^ +Key fingerprint = ([0-9A-F ]+)/)
    fingerprints << matched[1].split.join if matched
  end
  fingerprints
end
# install apt key from URI
#
# Caches the key file locally — downloaded when new_resource.key looks
# like a URL, otherwise copied from a cookbook — and registers it with
# apt-key. Skipped when every fingerprint in the cached key file is
# already installed.
def install_key_from_uri(uri)
  key_name = uri.split(/\//).last
  cached_keyfile = "#{Chef::Config[:file_cache_path]}/#{key_name}"
  if new_resource.key =~ /http/
    remote_file cached_keyfile do
      source new_resource.key
      mode 00644
      action :create
    end
  else
    cookbook_file cached_keyfile do
      source new_resource.key
      cookbook new_resource.cookbook
      mode 00644
      action :create
    end
  end

  execute "install-key #{key_name}" do
    command "apt-key add #{cached_keyfile}"
    action :run
    not_if do
      # all proposed fingerprints already present on the node?
      installed_keys = extract_fingerprints_from_cmd('apt-key finger')
      proposed_keys = extract_fingerprints_from_cmd("gpg --with-fingerprint #{cached_keyfile}")
      (installed_keys & proposed_keys).sort == proposed_keys.sort
    end
  end
end
# build repo file contents
#
# Renders a sources.list entry for the repository.
#
# @param uri [String] base URI of the distribution
# @param distribution [String] release codename, e.g. 'trusty'
# @param components [Array, String] component list, e.g. ['main']
# @param trusted [true, false] mark packages as authenticated
# @param arch [String, nil] restrict to one architecture when given
# @param add_deb_src [true, false] also emit a deb-src line
# @return [String] a 'deb' line, optionally followed by a 'deb-src' line
def build_repo(uri, distribution, components, trusted, arch, add_deb_src)
  component_list = components.respond_to?(:join) ? components.join(' ') : components
  options = []
  options << "arch=#{arch}" if arch
  options << 'trusted=yes' if trusted
  line = "#{uri} #{distribution} #{component_list}\n"
  line = "[#{options.join(' ')}] #{line}" unless options.empty?
  entry = "deb #{line}"
  entry << "deb-src #{line}" if add_deb_src
  entry
end
# Look up the signing-key fingerprint for a Launchpad PPA and install it
# from the default Ubuntu keyserver.
def get_ppa_key(ppa_owner, ppa_repo)
  # Launchpad has currently only one stable API which is marked as EOL April 2015.
  # The new api in devel still uses the same api call for +archive, so the
  # version is configurable to provide some sort of workaround if api 1.0
  # ceases to exist. See https://launchpad.net/+apidoc/
  require 'open-uri'
  api_base = "https://launchpad.net/api/#{node['apt']['launchpad_api_version']}/~%s/+archive/%s"
  api_query = format("#{api_base}/signing_key_fingerprint", ppa_owner, ppa_repo)
  begin
    key_id = open(api_query).read.delete('"')
  rescue OpenURI::HTTPError => e
    raise 'Could not access launchpad ppa key api: HttpError: ' + e.message
  rescue SocketError => e
    raise 'Could not access launchpad ppa key api: SocketError: ' + e.message
  end
  install_key_from_keyserver(key_id, 'keyserver.ubuntu.com')
end
# fetch ppa key, return full repo url
#
# Returns false for anything that is not a 'ppa:' URI; otherwise installs
# the PPA's signing key and returns the ppa.launchpad.net repository URL.
def get_ppa_url(ppa)
  # ppa:user/repo logic ported from
  # http://bazaar.launchpad.net/~ubuntu-core-dev/software-properties/main/view/head:/softwareproperties/ppa.py#L86
  return false unless ppa.start_with?('ppa:')
  owner, repo = ppa.split(':')[1].split('/')
  repo ||= 'ppa' # bare 'ppa:user' defaults to the 'ppa' archive
  get_ppa_key(owner, repo)
  format('http://ppa.launchpad.net/%s/%s/ubuntu', owner, repo)
end
# Install the signing key (if any), render the sources.list.d entry, and
# trigger a targeted `apt-get update` for just this repository.
action :add do
  # add key
  if new_resource.keyserver && new_resource.key
    install_key_from_keyserver(new_resource.key, new_resource.keyserver)
  elsif new_resource.key
    install_key_from_uri(new_resource.key)
  end
  # stamp file is deleted (via notification below) whenever the list file changes
  file '/var/lib/apt/periodic/update-success-stamp' do
    action :nothing
  end
  execute 'apt-cache gencaches' do
    ignore_failure true
    action :nothing
  end
  # update only this repository's list file, leaving other package lists alone
  execute 'apt-get update' do
    command "apt-get update -o Dir::Etc::sourcelist='sources.list.d/#{new_resource.name}.list' -o Dir::Etc::sourceparts='-' -o APT::Get::List-Cleanup='0'"
    ignore_failure true
    action :nothing
    notifies :run, 'execute[apt-cache gencaches]', :immediately
  end
  if new_resource.uri.start_with?('ppa:')
    # build ppa repo file (PPAs always use the 'main' component)
    repository = build_repo(
      get_ppa_url(new_resource.uri),
      new_resource.distribution,
      'main',
      new_resource.trusted,
      new_resource.arch,
      new_resource.deb_src
    )
  else
    # build repo file
    repository = build_repo(
      new_resource.uri,
      new_resource.distribution,
      new_resource.components,
      new_resource.trusted,
      new_resource.arch,
      new_resource.deb_src
    )
  end
  file "/etc/apt/sources.list.d/#{new_resource.name}.list" do
    owner 'root'
    group 'root'
    mode 00644
    content repository
    action :create
    # invalidate the update stamp, and optionally rebuild the cache right away
    notifies :delete, 'file[/var/lib/apt/periodic/update-success-stamp]', :immediately
    notifies :run, 'execute[apt-get update]', :immediately if new_resource.cache_rebuild
  end
end
# Delete this repository's sources.list.d entry if it exists.
action :remove do
  if ::File.exist?("/etc/apt/sources.list.d/#{new_resource.name}.list")
    Chef::Log.info "Removing #{new_resource.name} repository from /etc/apt/sources.list.d/"
    file "/etc/apt/sources.list.d/#{new_resource.name}.list" do
      action :delete
    end
  end
end

View File

@ -0,0 +1,83 @@
#
# Cookbook Name:: apt
# Recipe:: cacher-client
#
# Copyright 2011-2013 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Make the Apt helper methods (e.g. interface_ipaddress used below)
# available inside this recipe.
class ::Chef::Recipe
  include ::Apt
end

# remove Acquire::http::Proxy lines from /etc/apt/apt.conf since we use 01proxy
# these are leftover from preseed installs
execute 'Remove proxy from /etc/apt/apt.conf' do
  command "sed --in-place '/^Acquire::http::Proxy/d' /etc/apt/apt.conf"
  only_if 'grep Acquire::http::Proxy /etc/apt/apt.conf'
end

# Candidate apt-cacher-ng servers, taken from explicit attributes first,
# then (below) from a Chef search.
servers = []
if node['apt']
  if node['apt']['cacher_ipaddress']
    # A fixed cacher address was configured: fabricate a minimal node
    # object so the template code below can treat it like a search result.
    cacher = Chef::Node.new
    cacher.default.name = node['apt']['cacher_ipaddress']
    cacher.default.ipaddress = node['apt']['cacher_ipaddress']
    cacher.default.apt.cacher_port = node['apt']['cacher_port']
    cacher.default.apt.cacher_interface = node['apt']['cacher_interface']
    cacher.default.apt.cacher_ssl_support = node['apt']['cacher_ssl_support']
    servers << cacher
  elsif node['apt']['caching_server']
    # This node is itself the caching server; disable compile-time proxying.
    node.override['apt']['compiletime'] = false
    servers << node
  end
end

# Fall back to search (not available under chef-solo), optionally scoped
# to the current environment.
unless Chef::Config[:solo] || servers.length > 0
  query = 'apt_caching_server:true'
  query += " AND chef_environment:#{node.chef_environment}" if node['apt']['cacher-client']['restrict_environment']
  Chef::Log.debug("apt::cacher-client searching for '#{query}'")
  servers += search(:node, query)
end

if servers.length > 0
  Chef::Log.info("apt-cacher-ng server found on #{servers[0]}.")
  # Prefer the address of the configured interface when one is given.
  if servers[0]['apt']['cacher_interface']
    cacher_ipaddress = interface_ipaddress(servers[0], servers[0]['apt']['cacher_interface'])
  else
    cacher_ipaddress = servers[0].ipaddress
  end
  t = template '/etc/apt/apt.conf.d/01proxy' do
    source '01proxy.erb'
    owner 'root'
    group 'root'
    mode 00644
    variables(
      :proxy => cacher_ipaddress,
      :port => servers[0]['apt']['cacher_port'],
      :proxy_ssl => servers[0]['apt']['cacher_ssl_support'],
      :bypass => node['apt']['cache_bypass']
    )
    action(node['apt']['compiletime'] ? :nothing : :create)
    notifies :run, 'execute[apt-get update]', :immediately
  end
  # When compile-time proxying is requested, render the template during the
  # compile phase instead of waiting for convergence.
  t.run_action(:create) if node['apt']['compiletime']
else
  Chef::Log.info('No apt-cacher-ng server found.')
  # No cacher available: make sure any stale proxy config is removed.
  file '/etc/apt/apt.conf.d/01proxy' do
    action :delete
  end
end

include_recipe 'apt::default'

View File

@ -0,0 +1,43 @@
#
# Cookbook Name:: apt
# Recipe:: cacher-ng
#
# Copyright 2008-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Mark this node as a caching server so apt::cacher-client can discover it.
node.set['apt']['caching_server'] = true

package 'apt-cacher-ng' do
  action :install
end

# The cache directory must be owned by the daemon user.
directory node['apt']['cacher_dir'] do
  owner 'apt-cacher-ng'
  group 'apt-cacher-ng'
  mode 0755
end

template '/etc/apt-cacher-ng/acng.conf' do
  source 'acng.conf.erb'
  owner 'root'
  group 'root'
  mode 00644
  # restart immediately so the rendered cache dir/port take effect
  notifies :restart, 'service[apt-cacher-ng]', :immediately
end

service 'apt-cacher-ng' do
  supports :restart => true, :status => false
  action [:enable, :start]
end

View File

@ -0,0 +1,112 @@
#
# Cookbook Name:: apt
# Recipe:: default
#
# Copyright 2008-2013, Chef Software, Inc.
# Copyright 2009, Bryan McLellan <btm@loftninjas.org>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# On systems where apt is not installed, the resources in this recipe are not
# executed. However, they _must_ still be present in the resource collection
# or other cookbooks which notify these resources will fail on non-apt-enabled
# systems.
Chef::Log.debug 'apt is not installed. Apt-specific resources will not be executed.' unless apt_installed?

# Marker file recording that the compile-time update already ran once.
first_run_file = File.join(Chef::Config[:file_cache_path], 'apt_compile_time_update_first_run')

# Timestamp touched (via notifications) after every successful update.
file '/var/lib/apt/periodic/update-success-stamp' do
  owner 'root'
  group 'root'
  only_if { apt_installed? }
  action :nothing
end

# If compile_time_update run apt-get update at compile time
if node['apt']['compile_time_update'] && (!::File.exist?('/var/lib/apt/periodic/update-success-stamp') || !::File.exist?(first_run_file))
  e = bash 'apt-get-update at compile time' do
    code <<-EOH
      apt-get update
      touch #{first_run_file}
    EOH
    ignore_failure true
    only_if { apt_installed? }
    action :nothing
    notifies :touch, 'file[/var/lib/apt/periodic/update-success-stamp]', :immediately
  end
  # run_action executes during the compile phase, before convergence
  e.run_action(:run)
end

# Updates 'apt-get update' timestamp after each update success
directory '/etc/apt/apt.conf.d' do
  recursive true
end
# APT hook shipped by this cookbook that maintains the stamp file.
cookbook_file '/etc/apt/apt.conf.d/15update-stamp' do
  source '15update-stamp'
end

# Run apt-get update to create the stamp file
execute 'apt-get-update' do
  command 'apt-get update'
  ignore_failure true
  only_if { apt_installed? }
  not_if { ::File.exist?('/var/lib/apt/periodic/update-success-stamp') }
  notifies :touch, 'file[/var/lib/apt/periodic/update-success-stamp]', :immediately
end

# For other recipes to call to force an update
execute 'apt-get update' do
  command 'apt-get update'
  ignore_failure true
  only_if { apt_installed? }
  action :nothing
  notifies :touch, 'file[/var/lib/apt/periodic/update-success-stamp]', :immediately
end

# Automatically remove packages that are no longer needed for dependencies
execute 'apt-get autoremove' do
  command 'apt-get -y autoremove'
  only_if { apt_installed? }
  action :nothing
end

# Automatically remove .deb files for packages no longer on your system
execute 'apt-get autoclean' do
  command 'apt-get -y autoclean'
  only_if { apt_installed? }
  action :nothing
end

# Refresh package lists only when the stamp is older than the configured
# minimum delay.
execute 'apt-get-update-periodic' do
  command 'apt-get update'
  ignore_failure true
  only_if do
    apt_installed? &&
      ::File.exist?('/var/lib/apt/periodic/update-success-stamp') &&
      ::File.mtime('/var/lib/apt/periodic/update-success-stamp') < Time.now - node['apt']['periodic_update_min_delay']
  end
  notifies :touch, 'file[/var/lib/apt/periodic/update-success-stamp]', :immediately
end

# Directories used for preseeding package installs.
%w(/var/cache/local /var/cache/local/preseeding).each do |dirname|
  directory dirname do
    owner 'root'
    group 'root'
    mode 00755
    action :create
    only_if { apt_installed? }
  end
end

View File

@ -0,0 +1,47 @@
#
# Cookbook Name:: apt
# Recipe:: unattended-upgrades
#
# Copyright 2014, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# On systems where apt is not installed, the resources in this recipe are not
# executed. However, they _must_ still be present in the resource collection
# or other cookbooks which notify these resources will fail on non-apt-enabled
# systems.
#
# Preseed the package so debconf questions don't block the install.
package 'unattended-upgrades' do
  response_file 'unattended-upgrades.seed.erb'
  action :install
end

# mailx is only needed when upgrade reports are mailed out.
package 'bsd-mailx' do
  only_if { node['apt']['unattended_upgrades']['mail'] }
end

# Enables/disables the periodic apt jobs.
template '/etc/apt/apt.conf.d/20auto-upgrades' do
  owner 'root'
  group 'root'
  mode '644'
  source '20auto-upgrades.erb'
end

# Main unattended-upgrades policy (origins, blacklist, reboot, mail, ...).
template '/etc/apt/apt.conf.d/50unattended-upgrades' do
  owner 'root'
  group 'root'
  mode '644'
  source '50unattended-upgrades.erb'
end

View File

@ -0,0 +1,37 @@
#
# Cookbook Name:: apt
# Resource:: preference
#
# Copyright 2010-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
actions :add, :remove
default_action :add if defined?(default_action) # Chef > 10.8

# Needed for Chef versions < 0.10.10
def initialize(*args)
  super
  @action = :add
end

state_attrs :glob,
            :package_name,
            :pin,
            :pin_priority

# package to pin; doubles as the resource's name attribute
attribute :package_name, :kind_of => String, :name_attribute => true, :regex => [/^([a-z]|[A-Z]|[0-9]|_|-|\.)+$/]
# glob pattern matching package names (alternative to package_name)
attribute :glob, :kind_of => String
# apt pin expression, e.g. 'version 1.2.3' or 'release o=Debian'
attribute :pin, :kind_of => String
# pin priority value, as a String
attribute :pin_priority, :kind_of => String

View File

@ -0,0 +1,55 @@
#
# Cookbook Name:: apt
# Resource:: repository
#
# Copyright 2010-2013, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
actions :add, :remove
default_action :add if defined?(default_action) # Chef > 10.8

# Needed for Chef versions < 0.10.10
def initialize(*args)
  super
  @action = :add
end

state_attrs :arch,
            :cache_rebuild,
            :components,
            :cookbook,
            :deb_src,
            :distribution,
            :key,
            :keyserver,
            :repo_name,
            :trusted,
            :uri

# name of the repo, used for source.list filename
attribute :repo_name, :kind_of => String, :name_attribute => true, :regex => [/^([a-z]|[A-Z]|[0-9]|_|-|\.)+$/]
# base URI of the repository (or a 'ppa:' shorthand)
attribute :uri, :kind_of => String
# distribution/codename written into the sources.list entry
attribute :distribution, :kind_of => String
# repository components, e.g. ['main', 'universe']
attribute :components, :kind_of => Array, :default => []
# emitted as an '[arch=...]' option on the sources.list line
attribute :arch, :kind_of => String, :default => nil
# emitted as '[trusted=yes]' on the sources.list line
attribute :trusted, :kind_of => [TrueClass, FalseClass], :default => false
# whether or not to add the repository as a source repo as well
attribute :deb_src, :default => false
# keyserver to fetch the signing key from (used together with key)
attribute :keyserver, :kind_of => String, :default => nil
# signing key: key id (with keyserver), URL, or cookbook file name
attribute :key, :kind_of => String, :default => nil
# cookbook containing the key file when key is a cookbook file
attribute :cookbook, :kind_of => String, :default => nil
# trigger cache rebuild
# If not you can trigger in the recipe itself after checking the status of resource.updated{_by_last_action}?
attribute :cache_rebuild, :kind_of => [TrueClass, FalseClass], :default => true

View File

@ -0,0 +1,173 @@
# Letter case in directive names does not matter. Must be separated with colons.
# Valid boolean values are a zero number for false, non-zero numbers for true.
CacheDir: <%= node['apt']['cacher_dir'] %>
# set empty to disable logging
LogDir: /var/log/apt-cacher-ng
# TCP (http) port
# Set to 9999 to emulate apt-proxy
Port:<%= node['apt']['cacher_port'] %>
# Addresses or hostnames to listen on. Multiple addresses must be separated by
# spaces. Each entry must be associated with a local interface. DNS resolution
# is performed using getaddrinfo(3) for all available protocols (i.e. IPv4 and
# IPv6 if available).
#
# Default: not set, will listen on all interfaces.
#
# BindAddress: localhost 192.168.7.254 publicNameOnMainInterface
#Proxy: http://www-proxy.example.net:80
#proxy: http://username:proxypassword@proxy.example.net:3128
# Repository remapping. See manual for details.
# In this example, backends file is generated during package installation.
Remap-debrep: file:deb_mirror*.gz /debian ; file:backends_debian
Remap-uburep: file:ubuntu_mirrors /ubuntu ; file:backends_ubuntu
Remap-debvol: file:debvol_mirror*.gz /debian-volatile ; file:backends_debvol
Remap-cygwin: file:cygwin_mirrors /cygwin # ; file:backends_cygwin # incomplete, please create this file
# Virtual page accessible in a web browser to see statistics and status
# information, i.e. under http://localhost:3142/acng-report.html
ReportPage: acng-report.html
# Socket file for accessing through local UNIX socket instead of TCP/IP. Can be
# used with inetd bridge or cron client.
# SocketPath:/var/run/apt-cacher-ng/socket
# Forces log file to be written to disk after every line when set to 1. Default
# is 0, buffer flush happens after client disconnects.
#
# (technically, this is an alias to the Debug option provided for convenience)
#
# UnbufferLogs: 0
# Set to 0 to store only type, time and transfer sizes.
# 1 -> client IP and relative local path are logged too
# VerboseLog: 1
# Don't detach from the console
# ForeGround: 0
# Store the pid of the daemon process therein
# PidFile: /var/run/apt-cacher-ng/pid
# Forbid outgoing connections, work around them or respond with 503 error
# offlinemode:0
# Forbid all downloads that don't run through preconfigured backends (.where)
#ForceManaged: 0
# Days before considering an unreferenced file expired (to be deleted).
# Warning: if the value is set too low and particular index files are not
# available for some days (mirror downtime) there is a risk of deletion of
# still useful package files.
ExTreshold: 4
# Stop expiration when a critical problem appeared. Currently only failed
# refresh of an index file is considered as critical.
#
# WARNING: don't touch this option or set to a non-zero number.
# Anything else is DANGEROUS and may cause data loss.
#
# ExAbortOnProblems: 1
# Replace some Windows/DOS-FS incompatible chars when storing
# StupidFs: 0
# Experimental feature for apt-listbugs: pass-through SOAP requests and
# responses to/from bugs.debian.org. If not set, default is true if
# ForceManaged is enabled and false otherwise.
# ForwardBtsSoap: 1
# The daemon has a small cache for DNS data, to speed up resolution. The
# expiration time of the DNS entries can be configured in seconds.
# DnsCacheSeconds: 3600
# Don't touch the following values without good consideration!
#
# Max. count of connection threads kept ready (for faster response in the
# future). Should be a sane value between 0 and average number of connections,
# and depend on the amount of spare RAM.
# MaxStandbyConThreads: 8
#
# Hard limit of active thread count for incoming connections, i.e. operation
# is refused when this value is reached (below zero = unlimited).
# MaxConThreads: -1
#
#VfilePattern = (^|.*?/)(Index|Packages\.bz2|Packages\.gz|Packages|Release|Release\.gpg|Sources\.bz2|Sources\.gz|Sources|release|index\.db-.*\.gz|Contents-[^/]*\.gz|pkglist[^/]*\.bz2|rclist[^/]*\.bz2|/meta-release[^/]*|Translation[^/]*\.bz2)$
#PfilePattern = .*(\.deb|\.rpm|\.dsc|\.tar\.gz\.gpg|\.tar\.gz|\.diff\.gz|\.diff\.bz2|\.jigdo|\.template|changelog|copyright|\.udeb|\.diff/.*\.gz|vmlinuz|initrd\.gz|(Devel)?ReleaseAnnouncement(\\?.*)?)$
# Whitelist for expiration, file types not to be removed even when being
# unreferenced. Default: same as VfilePattern which is a safe bed. When and
# only when the only used mirrors are official repositories (with working
# Release files) then it might be set to something more restrictive, like
# (^|.*?/)(Release|Release\.gpg|release|meta-release|Translation[^/]*\.bz2)$
#WfilePattern = (^|.*?/)(Index|Packages\.bz2|Packages\.gz|Packages|Release|Release\.gpg|Sources\.bz2|Sources\.gz|Sources|release|index\.db-.*\.gz|Contents-[^/]*\.gz|pkglist[^/]*\.bz2|rclist[^/]*\.bz2|/meta-release[^/]*|Translation[^/]*\.bz2)$
# Higher modes only working with the debug version
# Warning, writes a lot into apt-cacher.err logfile
# Value overwrites UnbufferLogs setting (aliased)
# Debug:3
# Usually, general purpose proxies like Squid expose the IP adress of the
# client user to the remote server using the X-Forwarded-For HTTP header. This
# behaviour can be optionally turned on with the Expose-Origin option.
# ExposeOrigin: 0
# When logging the originating IP address, trust the information supplied by
# the client in the X-Forwarded-For header.
# LogSubmittedOrigin: 0
# The version string reported to the peer, to be displayed as HTTP client (and
# version) in the logs of the mirror.
# WARNING: some archives use this header to detect/guess capabilities of the
# client (i.e. redirection support) and change the behaviour accordingly, while
# ACNG might not support the expected features. Expect side effects.
#
# UserAgent: Yet Another HTTP Client/1.2.3p4
# In some cases the Import and Expiration tasks might create fresh volatile
# data for internal use by reconstructing them using patch files. This
# by-product might be recompressed with bzip2 and with some luck the resulting
# file becomes identical to the *.bz2 file on the server, usable for APT
# clients trying to fetch the full .bz2 compressed version. Injection of the
# generated files into the cache has however a disadvantage on underpowered
# servers: bzip2 compession can create high load on the server system and the
# visible download of the busy .bz2 files also becomes slower.
#
# RecompBz2: 0
# Network timeout for outgoing connections.
# NetworkTimeout: 60
# Sometimes it makes sense to not store the data in cache and just return the
# package data to client as it comes in. DontCache parameters can enable this
# behaviour for certain URL types. The tokens are extended regular expressions
# that URLs are matched against.
#
# DontCacheRequested is applied to the URL as it comes in from the client.
# Example: exclude packages built with kernel-package for x86
# DontCacheRequested: linux-.*_10\...\.Custo._i386
# Example usecase: exclude popular private IP ranges from caching
# DontCacheRequested: 192.168.0 ^10\..* 172.30
#
# DontCacheResolved is applied to URLs after mapping to the target server. If
# multiple backend servers are specified then it's only matched against the
# download link for the FIRST possible source (due to implementation limits).
# Example usecase: all Ubuntu stuff comes from a local mirror (specified as
# backend), don't cache it again:
# DontCacheResolved: ubuntumirror.local.net
#
# DontCache directive sets (overrides) both, DontCacheResolved and
# DontCacheRequested. Provided for convenience, see those directives for
# details.
#
# Default permission set of freshly created files and directories, as octal
# numbers (see chmod(1) for details).
# Can by limited by the umask value (see umask(2) for details) if it's set in
# the environment of the starting shell, e.g. in apt-cacher-ng init script or
# in its configuration file.
# DirPerms: 00755
# FilePerms: 00664

View File

@ -0,0 +1,9 @@
Acquire::http::Proxy "http://<%= @proxy %>:<%= @port %>";
<% if @proxy_ssl %>
Acquire::https::Proxy "http://<%= @proxy %>:<%= @port %>";
<% else %>
Acquire::https::Proxy "DIRECT";
<% end %>
<% @bypass.each do |bypass, type| %>
Acquire::<%= type %>::Proxy::<%= bypass %> "DIRECT";
<% end %>

View File

@ -0,0 +1,2 @@
APT::Periodic::Update-Package-Lists "<%= node['apt']['unattended_upgrades']['update_package_lists'] ? 1 : 0 %>";
APT::Periodic::Unattended-Upgrade "<%= node['apt']['unattended_upgrades']['enable'] ? 1 : 0 %>";

View File

@ -0,0 +1,68 @@
// Automatically upgrade packages from these (origin:archive) pairs
Unattended-Upgrade::Allowed-Origins {
<% unless node['apt']['unattended_upgrades']['allowed_origins'].empty? -%>
<% node['apt']['unattended_upgrades']['allowed_origins'].each do |origin| -%>
"<%= origin %>";
<% end -%>
<% end -%>
};
// List of packages to not update
Unattended-Upgrade::Package-Blacklist {
<% unless node['apt']['unattended_upgrades']['package_blacklist'].empty? -%>
<% node['apt']['unattended_upgrades']['package_blacklist'].each do |package| -%>
"<%= package %>";
<% end -%>
<% end -%>
};
// This option allows you to control if on a unclean dpkg exit
// unattended-upgrades will automatically run
// dpkg --force-confold --configure -a
// The default is true, to ensure updates keep getting installed
Unattended-Upgrade::AutoFixInterruptedDpkg "<%= node['apt']['unattended_upgrades']['auto_fix_interrupted_dpkg'] ? 'true' : 'false' %>";
// Split the upgrade into the smallest possible chunks so that
// they can be interrupted with SIGUSR1. This makes the upgrade
// a bit slower but it has the benefit that shutdown while a upgrade
// is running is possible (with a small delay)
Unattended-Upgrade::MinimalSteps "<%= node['apt']['unattended_upgrades']['minimal_steps'] ? 'true' : 'false' %>";
// Install all unattended-upgrades when the machine is shutting down
// instead of doing it in the background while the machine is running
// This will (obviously) make shutdown slower
Unattended-Upgrade::InstallOnShutdown "<%= node['apt']['unattended_upgrades']['install_on_shutdown'] ? 'true' : 'false' %>";
// Send email to this address for problems or packages upgrades
// If empty or unset then no email is sent, make sure that you
// have a working mail setup on your system. A package that provides
// 'mailx' must be installed.
<% if node['apt']['unattended_upgrades']['mail'] -%>
Unattended-Upgrade::Mail "<%= node['apt']['unattended_upgrades']['mail'] %>";
<% end -%>
// Set this value to "true" to get emails only on errors. Default
// is to always send a mail if Unattended-Upgrade::Mail is set
Unattended-Upgrade::MailOnlyOnError "<%= node['apt']['unattended_upgrades']['mail_only_on_error'] ? 'true' : 'false' %>";
// Do automatic removal of new unused dependencies after the upgrade
// (equivalent to apt-get autoremove)
Unattended-Upgrade::Remove-Unused-Dependencies "<%= node['apt']['unattended_upgrades']['remove_unused_dependencies'] ? 'true' : 'false' %>";
// Automatically reboot *WITHOUT CONFIRMATION* if
// the file /var/run/reboot-required is found after the upgrade
Unattended-Upgrade::Automatic-Reboot "<%= node['apt']['unattended_upgrades']['automatic_reboot'] ? 'true' : 'false' %>";
// If automatic reboot is enabled and needed, reboot at the specific
// time instead of immediately
// Default: "now"
<% if node['apt']['unattended_upgrades']['automatic_reboot'] -%>
Unattended-Upgrade::Automatic-Reboot-Time "<%= node['apt']['unattended_upgrades']['automatic_reboot_time'] %>";
<% end %>
// Use apt bandwidth limit feature, this example limits the download
// speed to 70kb/sec
<% if node['apt']['unattended_upgrades']['dl_limit'] -%>
Acquire::http::Dl-Limit "<%= node['apt']['unattended_upgrades']['dl_limit'] %>";
<% end -%>

View File

@ -0,0 +1,275 @@
# Letter case in directive names does not matter. Must be separated with colons.
# Valid boolean values are a zero number for false, non-zero numbers for true.
CacheDir: <%= node['apt']['cacher_dir'] %>
# set empty to disable logging
LogDir: /var/log/apt-cacher-ng
# place to look for additional configuration and resource files if they are not
# found in the configuration directory
# SupportDir: /usr/lib/apt-cacher-ng
# TCP (http) port
# Set to 9999 to emulate apt-proxy
Port:<%= node['apt']['cacher_port'] %>
# Addresses or hostnames to listen on. Multiple addresses must be separated by
# spaces. Each entry must be an exact local address which is associated with a
# local interface. DNS resolution is performed using getaddrinfo(3) for all
# available protocols (IPv4, IPv6, ...). Using a protocol specific format will
# create binding(s) only on protocol specific socket(s) (e.g. 0.0.0.0 will listen
# only to IPv4).
#
# Default: not set, will listen on all interfaces and protocols
#
# BindAddress: localhost 192.168.7.254 publicNameOnMainInterface
# The specification of another proxy which shall be used for downloads.
# Username and password are supported; see the manual for limitations.
#
#Proxy: http://www-proxy.example.net:80
#proxy: username:proxypassword@proxy.example.net:3128
# Repository remapping. See manual for details.
# In this example, some backends files might be generated during package
# installation using information collected on the system.
Remap-debrep: file:deb_mirror*.gz /debian ; file:backends_debian # Debian Archives
Remap-uburep: file:ubuntu_mirrors /ubuntu ; file:backends_ubuntu # Ubuntu Archives
Remap-debvol: file:debvol_mirror*.gz /debian-volatile ; file:backends_debvol # Debian Volatile Archives
Remap-cygwin: file:cygwin_mirrors /cygwin # ; file:backends_cygwin # incomplete, please create this file or specify preferred mirrors here
Remap-sfnet: file:sfnet_mirrors # ; file:backends_sfnet # incomplete, please create this file or specify preferred mirrors here
Remap-alxrep: file:archlx_mirrors /archlinux # ; file:backend_archlx # Arch Linux
Remap-fedora: file:fedora_mirrors # Fedora Linux
Remap-epel: file:epel_mirrors # Fedora EPEL
Remap-slrep: file:sl_mirrors # Scientific Linux
# This is usually not needed for security.debian.org because it's always the
# same DNS hostname. However, it might be enabled in order to use hooks,
# ForceManaged mode or special flags in this context.
# Remap-secdeb: security.debian.org
# Virtual page accessible in a web browser to see statistics and status
# information, i.e. under http://localhost:3142/acng-report.html
ReportPage: acng-report.html
# Socket file for accessing through local UNIX socket instead of TCP/IP. Can be
# used with inetd bridge or cron client.
# SocketPath:/var/run/apt-cacher-ng/socket
# Forces log file to be written to disk after every line when set to 1. Default
# is 0, buffers are flushed when the client disconnects.
#
# (technically, alias to the Debug option, see its documentation for details)
#
# UnbufferLogs: 0
# Set to 0 to store only type, time and transfer sizes.
# 1 -> client IP and relative local path are logged too
# VerboseLog: 1
# Don't detach from the console
# ForeGround: 0
# Store the pid of the daemon process therein
# PidFile: /var/run/apt-cacher-ng/pid
# Forbid outgoing connections, work around them or respond with 503 error
# offlinemode:0
# Forbid all downloads that don't run through preconfigured backends (.where)
#ForceManaged: 0
# Days before considering an unreferenced file expired (to be deleted).
# Warning: if the value is set too low and particular index files are not
# available for some days (mirror downtime) there is a risk of deletion of
# still useful package files.
ExTreshold: 4
# Stop expiration when a critical problem appeared. Currently only failed
# refresh of an index file is considered as critical.
#
# WARNING: don't touch this option or set to zero.
# Anything else is DANGEROUS and may cause data loss.
#
# ExAbortOnProblems: 1
# Replace some Windows/DOS-FS incompatible chars when storing
# StupidFs: 0
# Experimental feature for apt-listbugs: pass-through SOAP requests and
# responses to/from bugs.debian.org. If not set, default is true if
# ForceManaged is enabled and false otherwise.
# ForwardBtsSoap: 1
# The daemon has a small cache for DNS data, to speed up resolution. The
# expiration time of the DNS entries can be configured in seconds.
# DnsCacheSeconds: 3600
# Don't touch the following values without good consideration!
#
# Max. count of connection threads kept ready (for faster response in the
# future). Should be a sane value between 0 and average number of connections,
# and depend on the amount of spare RAM.
# MaxStandbyConThreads: 8
#
# Hard limit of active thread count for incoming connections, i.e. operation
# is refused when this value is reached (below zero = unlimited).
# MaxConThreads: -1
#
# Pigeonholing files with regular expressions (static/volatile). Can be
# overridden here, but this should not be done permanently because future updates
# of default settings would not be applied later.
# VfilePattern = (^|.*?/)(Index|Packages(\.gz|\.bz2|\.lzma|\.xz)?|InRelease|Release|Release\.gpg|Sources(\.gz|\.bz2|\.lzma|\.xz)?|release|index\.db-.*\.gz|Contents-[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|pkglist[^/]*\.bz2|rclist[^/]*\.bz2|/meta-release[^/]*|Translation[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|MD5SUMS|SHA1SUMS|((setup|setup-legacy)(\.ini|\.bz2|\.hint)(\.sig)?)|mirrors\.lst|repo(index|md)\.xml(\.asc|\.key)?|directory\.yast|products|content(\.asc|\.key)?|media|filelists\.xml\.gz|filelists\.sqlite\.bz2|repomd\.xml|packages\.[a-zA-Z][a-zA-Z]\.gz|info\.txt|license\.tar\.gz|license\.zip|.*\.db(\.tar\.gz)?|.*\.files\.tar\.gz|.*\.abs\.tar\.gz|metalink\?repo|.*prestodelta\.xml\.gz)$|/dists/.*/installer-[^/]+/[^0-9][^/]+/images/.*
# PfilePattern = .*(\.d?deb|\.rpm|\.dsc|\.tar(\.gz|\.bz2|\.lzma|\.xz)(\.gpg)?|\.diff(\.gz|\.bz2|\.lzma|\.xz)|\.jigdo|\.template|changelog|copyright|\.udeb|\.debdelta|\.diff/.*\.gz|(Devel)?ReleaseAnnouncement(\?.*)?|[a-f0-9]+-(susedata|updateinfo|primary|deltainfo).xml.gz|fonts/(final/)?[a-z]+32.exe(\?download.*)?|/dists/.*/installer-[^/]+/[0-9][^/]+/images/.*)$
# Whitelist for expiration, file types not to be removed even when being
# unreferenced. Default: many parts from VfilePattern where no parent index
# exists or might be unknown.
# WfilePattern = (^|.*?/)(Release|InRelease|Release\.gpg|(Packages|Sources)(\.gz|\.bz2|\.lzma|\.xz)?|Translation[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|MD5SUMS|SHA1SUMS|.*\.xml|.*\.db\.tar\.gz|.*\.files\.tar\.gz|.*\.abs\.tar\.gz|[a-z]+32.exe)$|/dists/.*/installer-.*/images/.*
# Higher modes only working with the debug version
# Warning, writes a lot into apt-cacher.err logfile
# Value overwrites UnbufferLogs setting (aliased)
# Debug:3
# Usually, general purpose proxies like Squid expose the IP address of the
# client user to the remote server using the X-Forwarded-For HTTP header. This
# behaviour can be optionally turned on with the Expose-Origin option.
# ExposeOrigin: 0
# When logging the originating IP address, trust the information supplied by
# the client in the X-Forwarded-For header.
# LogSubmittedOrigin: 0
# The version string reported to the peer, to be displayed as HTTP client (and
# version) in the logs of the mirror.
# WARNING: some archives use this header to detect/guess capabilities of the
# client (i.e. redirection support) and change the behaviour accordingly, while
# ACNG might not support the expected features. Expect side effects.
#
# UserAgent: Yet Another HTTP Client/1.2.3p4
# In some cases the Import and Expiration tasks might create fresh volatile
# data for internal use by reconstructing them using patch files. This
# by-product might be recompressed with bzip2 and with some luck the resulting
# file becomes identical to the *.bz2 file on the server, usable for APT
# clients trying to fetch the full .bz2 compressed version. Injection of the
# generated files into the cache has however a disadvantage on underpowered
# servers: bzip2 compression can create high load on the server system and the
# visible download of the busy .bz2 files also becomes slower.
#
# RecompBz2: 0
# Network timeout for outgoing connections.
# NetworkTimeout: 60
# Sometimes it makes sense to not store the data in cache and just return the
# package data to client as it comes in. DontCache parameters can enable this
# behaviour for certain URL types. The tokens are extended regular expressions
# that URLs are matched against.
#
# DontCacheRequested is applied to the URL as it comes in from the client.
# Example: exclude packages built with kernel-package for x86
# DontCacheRequested: linux-.*_10\...\.Custo._i386
# Example usecase: exclude popular private IP ranges from caching
# DontCacheRequested: 192.168.0 ^10\..* 172.30
#
# DontCacheResolved is applied to URLs after mapping to the target server. If
# multiple backend servers are specified then it's only matched against the
# download link for the FIRST possible source (due to implementation limits).
# Example usecase: all Ubuntu stuff comes from a local mirror (specified as
# backend), don't cache it again:
# DontCacheResolved: ubuntumirror.local.net
#
# DontCache directive sets (overrides) both, DontCacheResolved and
# DontCacheRequested. Provided for convenience, see those directives for
# details.
#
# Default permission set of freshly created files and directories, as octal
# numbers (see chmod(1) for details).
# Can be limited by the umask value (see umask(2) for details) if it's set in
# the environment of the starting shell, e.g. in apt-cacher-ng init script or
# in its configuration file.
# DirPerms: 00755
# FilePerms: 00664
#
#
# It's possible to use apt-cacher-ng as a regular web server with limited
# feature set, i.e.
# including directory browsing and download of any file;
# excluding sorting, mime types/encodings, CGI execution, index page
# redirection and other funny things.
# To get this behavior, mappings between virtual directories and real
# directories on the server must be defined with the LocalDirs directive.
# Virtual and real dirs are separated by spaces, multiple pairs are separated
# by semi-colons. Real directories must be absolute paths.
# NOTE: Since the names of that key directories share the same namespace as
# repository names (see Remap-...) it's administrators job to avoid such
# collisions on them (unless created deliberately).
#
# LocalDirs: woo /data/debarchive/woody ; hamm /data/debarchive/hamm
# Precache a set of files referenced by specified index files. This can be used
# to create a partial mirror usable for offline work. There are certain limits
# and restrictions on the path specification, see manual for details. A list of
# (maybe) relevant index files could be retrieved via
# "apt-get --print-uris update" on a client machine.
#
# PrecacheFor: debrep/dists/unstable/*/source/Sources* debrep/dists/unstable/*/binary-amd64/Packages*
# Arbitrary set of data to append to request headers sent over the wire. Should
# be a well-formatted HTTP headers part including newlines (DOS style) which
# can be entered as escape sequences (\r\n).
# RequestAppendix: X-Tracking-Choice: do-not-track\r\n
# Specifies the IP protocol families to use for remote connections. Order does
# matter, first specified are considered first. Possible combinations:
# v6 v4
# v4 v6
# v6
# v4
# (empty or not set: use system default)
#
# ConnectProto: v6 v4
# Regular expiration algorithm finds package files which are no longer listed
# in any index file and removes them after a safety period.
# This option allows to keep more versions of a package in the cache after
# safety period is over.
# KeepExtraVersions: 1
# Optionally uses TCP access control provided by libwrap, see hosts_access(5)
# for details. Daemon name is apt-cacher-ng. Default if not set: decided on
# startup by looking for explicit mentioning of apt-cacher-ng in
# /etc/hosts.allow or /etc/hosts.deny files.
# UseWrap: 0
# If many machines from the same local network attempt to update index files
# (apt-get update) at nearly the same time, the known state of these index files
# is temporarily frozen and multiple requests receive the cached response
# without contacting the server. This parameter (in seconds) specifies the
# length of this period before the files are considered outdated.
# Setting it too low transfers more data and increases remote server load,
# setting it too high (more than a couple of minutes) increases the risk of
# delivering inconsistent responses to the clients.
# FreshIndexMaxAge: 27
# Usually the users are not allowed to specify custom TCP ports of remote
# mirrors in the requests, only the default HTTP port can be used (instead,
# proxy administrator can create Remap- rules with custom ports). This
# restriction can be disabled by specifying a list of allowed ports or 0 for
# any port.
#
# AllowUserPorts: 80
# Normally the HTTP redirection responses are forwarded to the original caller
# (i.e. APT) which starts a new download attempt from the new URL. This
# solution is ok for client configurations with proxy mode but doesn't work
# well with configurations using URL prefixes. To work around this the server
# can restart its own download with another URL. However, this might be used to
# circumvent download source policies by malicious users.
# The RedirMax option specifies how many such redirects the server should
# follow per request, 0 disables the internal redirection. If not set,
# default value is 0 if ForceManaged is used and 5 otherwise.
#
# RedirMax: 5

View File

@ -0,0 +1 @@
unattended-upgrades unattended-upgrades/enable_auto_updates boolean <%= node['apt']['unattended_upgrades']['enable'] ? 'true' : 'false' %>

View File

@ -0,0 +1,269 @@
# Letter case in directive names does not matter. Must be separated with colons.
# Valid boolean values are a zero number for false, non-zero numbers for true.
CacheDir: <%= node['apt']['cacher_dir'] %>
# set empty to disable logging
LogDir: /var/log/apt-cacher-ng
# place to look for additional configuration and resource files if they are not
# found in the configuration directory
# SupportDir: /usr/lib/apt-cacher-ng
# TCP (http) port
# Set to 9999 to emulate apt-proxy
Port:<%= node['apt']['cacher_port'] %>
# Addresses or hostnames to listen on. Multiple addresses must be separated by
# spaces. Each entry must be an exact local address which is associated with a
# local interface. DNS resolution is performed using getaddrinfo(3) for all
# available protocols (IPv4, IPv6, ...). Using a protocol specific format will
# create binding(s) only on protocol specific socket(s) (e.g. 0.0.0.0 will listen
# only to IPv4).
#
# Default: not set, will listen on all interfaces and protocols
#
# BindAddress: localhost 192.168.7.254 publicNameOnMainInterface
# The specification of another proxy which shall be used for downloads.
# Username and password are optional; see manual for limitations.
#
#Proxy: http://www-proxy.example.net:80
#proxy: username:proxypassword@proxy.example.net:3128
# Repository remapping. See manual for details.
# In this example, some backends files might be generated during package
# installation using information collected on the system.
Remap-debrep: file:deb_mirror*.gz /debian ; file:backends_debian # Debian Archives
Remap-uburep: file:ubuntu_mirrors /ubuntu ; file:backends_ubuntu # Ubuntu Archives
Remap-debvol: file:debvol_mirror*.gz /debian-volatile ; file:backends_debvol # Debian Volatile Archives
# This is usually not needed for security.debian.org because it's always the
# same DNS hostname. However, it might be enabled in order to use hooks,
# ForceManaged mode or special flags in this context.
# Remap-secdeb: security.debian.org
# Virtual page accessible in a web browser to see statistics and status
# information, i.e. under http://localhost:3142/acng-report.html
ReportPage: acng-report.html
# Socket file for accessing through local UNIX socket instead of TCP/IP. Can be
# used with inetd bridge or cron client.
# SocketPath:/var/run/apt-cacher-ng/socket
# Forces log file to be written to disk after every line when set to 1. Default
# is 0, buffers are flushed when the client disconnects.
#
# (technically, alias to the Debug option, see its documentation for details)
#
# UnbufferLogs: 0
# Set to 0 to store only type, time and transfer sizes.
# 1 -> client IP and relative local path are logged too
# VerboseLog: 1
# Don't detach from the console
# ForeGround: 0
# Store the pid of the daemon process therein
# PidFile: /var/run/apt-cacher-ng/pid
# Forbid outgoing connections, work around them or respond with 503 error
# offlinemode:0
# Forbid all downloads that don't run through preconfigured backends (.where)
#ForceManaged: 0
# Days before considering an unreferenced file expired (to be deleted).
# Warning: if the value is set too low and particular index files are not
# available for some days (mirror downtime) there is a risk of deletion of
# still useful package files.
ExTreshold: 4
# Stop expiration when a critical problem appeared. Currently only failed
# refresh of an index file is considered as critical.
#
# WARNING: don't touch this option or set to zero.
# Anything else is DANGEROUS and may cause data loss.
#
# ExAbortOnProblems: 1
# Replace some Windows/DOS-FS incompatible chars when storing
# StupidFs: 0
# Experimental feature for apt-listbugs: pass-through SOAP requests and
# responses to/from bugs.debian.org. If not set, default is true if
# ForceManaged is enabled and false otherwise.
# ForwardBtsSoap: 1
# The daemon has a small cache for DNS data, to speed up resolution. The
# expiration time of the DNS entries can be configured in seconds.
# DnsCacheSeconds: 3600
# Don't touch the following values without good consideration!
#
# Max. count of connection threads kept ready (for faster response in the
# future). Should be a sane value between 0 and average number of connections,
# and depend on the amount of spare RAM.
# MaxStandbyConThreads: 8
#
# Hard limit of active thread count for incoming connections, i.e. operation
# is refused when this value is reached (below zero = unlimited).
# MaxConThreads: -1
#
# Pigeonholing files with regular expressions (static/volatile). Can be
# overridden here, but this should not be done permanently because future updates
# of default settings would not be applied later.
# VfilePattern = (^|.*?/)(Index|Packages(\.gz|\.bz2|\.lzma|\.xz)?|InRelease|Release|Release\.gpg|Sources(\.gz|\.bz2|\.lzma|\.xz)?|release|index\.db-.*\.gz|Contents-[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|pkglist[^/]*\.bz2|rclist[^/]*\.bz2|/meta-release[^/]*|Translation[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|MD5SUMS|SHA1SUMS|((setup|setup-legacy)(\.ini|\.bz2|\.hint)(\.sig)?)|mirrors\.lst|repo(index|md)\.xml(\.asc|\.key)?|directory\.yast|products|content(\.asc|\.key)?|media|filelists\.xml\.gz|filelists\.sqlite\.bz2|repomd\.xml|packages\.[a-zA-Z][a-zA-Z]\.gz|info\.txt|license\.tar\.gz|license\.zip|.*\.db(\.tar\.gz)?|.*\.files\.tar\.gz|.*\.abs\.tar\.gz|metalink\?repo|.*prestodelta\.xml\.gz)$|/dists/.*/installer-[^/]+/[^0-9][^/]+/images/.*
# PfilePattern = .*(\.d?deb|\.rpm|\.dsc|\.tar(\.gz|\.bz2|\.lzma|\.xz)(\.gpg)?|\.diff(\.gz|\.bz2|\.lzma|\.xz)|\.jigdo|\.template|changelog|copyright|\.udeb|\.debdelta|\.diff/.*\.gz|(Devel)?ReleaseAnnouncement(\?.*)?|[a-f0-9]+-(susedata|updateinfo|primary|deltainfo).xml.gz|fonts/(final/)?[a-z]+32.exe(\?download.*)?|/dists/.*/installer-[^/]+/[0-9][^/]+/images/.*)$
# Whitelist for expiration, file types not to be removed even when being
# unreferenced. Default: many parts from VfilePattern where no parent index
# exists or might be unknown.
# WfilePattern = (^|.*?/)(Release|InRelease|Release\.gpg|(Packages|Sources)(\.gz|\.bz2|\.lzma|\.xz)?|Translation[^/]*(\.gz|\.bz2|\.lzma|\.xz)?|MD5SUMS|SHA1SUMS|.*\.xml|.*\.db\.tar\.gz|.*\.files\.tar\.gz|.*\.abs\.tar\.gz|[a-z]+32.exe)$|/dists/.*/installer-.*/images/.*
# Higher modes only working with the debug version
# Warning, writes a lot into apt-cacher.err logfile
# Value overwrites UnbufferLogs setting (aliased)
# Debug:3
# Usually, general purpose proxies like Squid expose the IP address of the
# client user to the remote server using the X-Forwarded-For HTTP header. This
# behaviour can be optionally turned on with the Expose-Origin option.
# ExposeOrigin: 0
# When logging the originating IP address, trust the information supplied by
# the client in the X-Forwarded-For header.
# LogSubmittedOrigin: 0
# The version string reported to the peer, to be displayed as HTTP client (and
# version) in the logs of the mirror.
# WARNING: some archives use this header to detect/guess capabilities of the
# client (i.e. redirection support) and change the behaviour accordingly, while
# ACNG might not support the expected features. Expect side effects.
#
# UserAgent: Yet Another HTTP Client/1.2.3p4
# In some cases the Import and Expiration tasks might create fresh volatile
# data for internal use by reconstructing them using patch files. This
# by-product might be recompressed with bzip2 and with some luck the resulting
# file becomes identical to the *.bz2 file on the server, usable for APT
# clients trying to fetch the full .bz2 compressed version. Injection of the
# generated files into the cache has however a disadvantage on underpowered
# servers: bzip2 compression can create high load on the server system and the
# visible download of the busy .bz2 files also becomes slower.
#
# RecompBz2: 0
# Network timeout for outgoing connections.
# NetworkTimeout: 60
# Sometimes it makes sense to not store the data in cache and just return the
# package data to client as it comes in. DontCache parameters can enable this
# behaviour for certain URL types. The tokens are extended regular expressions
# that URLs are matched against.
#
# DontCacheRequested is applied to the URL as it comes in from the client.
# Example: exclude packages built with kernel-package for x86
# DontCacheRequested: linux-.*_10\...\.Custo._i386
# Example usecase: exclude popular private IP ranges from caching
# DontCacheRequested: 192.168.0 ^10\..* 172.30
#
# DontCacheResolved is applied to URLs after mapping to the target server. If
# multiple backend servers are specified then it's only matched against the
# download link for the FIRST possible source (due to implementation limits).
# Example usecase: all Ubuntu stuff comes from a local mirror (specified as
# backend), don't cache it again:
# DontCacheResolved: ubuntumirror.local.net
#
# DontCache directive sets (overrides) both, DontCacheResolved and
# DontCacheRequested. Provided for convenience, see those directives for
# details.
#
# Default permission set of freshly created files and directories, as octal
# numbers (see chmod(1) for details).
# Can be limited by the umask value (see umask(2) for details) if it's set in
# the environment of the starting shell, e.g. in apt-cacher-ng init script or
# in its configuration file.
# DirPerms: 00755
# FilePerms: 00664
#
#
# It's possible to use apt-cacher-ng as a regular web server with limited
# feature set, i.e.
# including directory browsing and download of any file;
# excluding sorting, mime types/encodings, CGI execution, index page
# redirection and other funny things.
# To get this behavior, mappings between virtual directories and real
# directories on the server must be defined with the LocalDirs directive.
# Virtual and real dirs are separated by spaces, multiple pairs are separated
# by semi-colons. Real directories must be absolute paths.
# NOTE: Since the names of that key directories share the same namespace as
# repository names (see Remap-...) it's administrators job to avoid such
# collisions on them (unless created deliberately).
#
# LocalDirs: woo /data/debarchive/woody ; hamm /data/debarchive/hamm
# Precache a set of files referenced by specified index files. This can be used
# to create a partial mirror usable for offline work. There are certain limits
# and restrictions on the path specification, see manual for details. A list of
# (maybe) relevant index files could be retrieved via
# "apt-get --print-uris update" on a client machine.
#
# PrecacheFor: debrep/dists/unstable/*/source/Sources* debrep/dists/unstable/*/binary-amd64/Packages*
# Arbitrary set of data to append to request headers sent over the wire. Should
# be a well-formatted HTTP headers part including newlines (DOS style) which
# can be entered as escape sequences (\r\n).
# RequestAppendix: X-Tracking-Choice: do-not-track\r\n
# Specifies the IP protocol families to use for remote connections. Order does
# matter, first specified are considered first. Possible combinations:
# v6 v4
# v4 v6
# v6
# v4
# (empty or not set: use system default)
#
# ConnectProto: v6 v4
# Regular expiration algorithm finds package files which are no longer listed
# in any index file and removes them after a safety period.
# This option allows to keep more versions of a package in the cache after
# safety period is over.
# KeepExtraVersions: 1
# Optionally uses TCP access control provided by libwrap, see hosts_access(5)
# for details. Daemon name is apt-cacher-ng. Default if not set: decided on
# startup by looking for explicit mentioning of apt-cacher-ng in
# /etc/hosts.allow or /etc/hosts.deny files.
# UseWrap: 0
# If many machines from the same local network attempt to update index files
# (apt-get update) at nearly the same time, the known state of these index files
# is temporarily frozen and multiple requests receive the cached response
# without contacting the server. This parameter (in seconds) specifies the
# length of this period before the files are considered outdated.
# Setting it too low transfers more data and increases remote server load,
# setting it too high (more than a couple of minutes) increases the risk of
# delivering inconsistent responses to the clients.
# FreshIndexMaxAge: 27
# Usually the users are not allowed to specify custom TCP ports of remote
# mirrors in the requests, only the default HTTP port can be used (instead,
# proxy administrator can create Remap- rules with custom ports). This
# restriction can be disabled by specifying a list of allowed ports or 0 for
# any port.
#
# AllowUserPorts: 80
# Normally the HTTP redirection responses are forwarded to the original caller
# (i.e. APT) which starts a new download attempt from the new URL. This
# solution is ok for client configurations with proxy mode but doesn't work
# well with configurations using URL prefixes. To work around this the server
# can restart its own download with another URL. However, this might be used to
# circumvent download source policies by malicious users.
# The RedirMax option specifies how many such redirects the server should
# follow per request, 0 disables the internal redirection. If not set,
# default value is 0 if ForceManaged is used and 5 otherwise.
#
# RedirMax: 5